Here, we use this code to evaluate the methods on the dyngen simulation, but enforce a more discriminative prior that better separates true edges from noisy ones.
library(tidyverse)
library(dyngen)
# Number of independent dyngen simulation replicates to generate.
nIter <- 6
# Make sure the output directory exists before the loop tries to write to it.
dir.create("datasets/simIters", recursive = TRUE, showWarnings = FALSE)
for (nn in seq_len(nIter)) {
  # Generate one dataset per replicate with a reproducible, replicate-specific seed.
  set.seed(nn * 10)
  backbone <- backbone_linear()
  model <- initialise_model(
    backbone = backbone,
    num_tfs = 100,
    num_targets = 500,
    num_hks = 100,
    # every TF is regulated by exactly 2 upstream regulators
    tf_network_params = tf_network_default(sample_num_regulators = function() 2),
    feature_network_params = feature_network_default(max_in_degree = 20),
    download_cache_dir = "~/.cache/dyngen",
    num_cores = 4,
    simulation_params = simulation_default(
      census_interval = 1,
      compute_cellwise_grn = TRUE
    )
  )
  # Standard dyngen pipeline: network -> kinetics -> gold standard -> cells -> counts.
  dataset <- model %>%
    generate_tf_network() %>%
    generate_feature_network() %>%
    generate_kinetics() %>%
    generate_gold_standard() %>%
    generate_cells() %>%
    generate_experiment()
  # Named list for readability; downstream positional access ([[1]]/[[2]]) still works.
  saveRDS(list(model = model, dataset = dataset),
          file = paste0("datasets/simIters/dataset", nn, ".rds"))
}
library(msigdbr)
library(GSEABase)
## Loading required package: BiocGenerics
##
## Attaching package: 'BiocGenerics'
## The following objects are masked from 'package:stats':
##
## IQR, mad, sd, var, xtabs
## The following objects are masked from 'package:base':
##
## anyDuplicated, aperm, append, as.data.frame, basename, cbind,
## colnames, dirname, do.call, duplicated, eval, evalq, Filter, Find,
## get, grep, grepl, intersect, is.unsorted, lapply, Map, mapply,
## match, mget, order, paste, pmax, pmax.int, pmin, pmin.int,
## Position, rank, rbind, Reduce, rownames, sapply, setdiff, sort,
## table, tapply, union, unique, unsplit, which.max, which.min
## Loading required package: Biobase
## Welcome to Bioconductor
##
## Vignettes contain introductory material; view with
## 'browseVignettes()'. To cite Bioconductor, see
## 'citation("Biobase")', and for packages 'citation("pkgname")'.
## Loading required package: annotate
## Loading required package: AnnotationDbi
## Loading required package: stats4
## Loading required package: IRanges
## Loading required package: S4Vectors
##
## Attaching package: 'S4Vectors'
## The following object is masked from 'package:utils':
##
## findMatches
## The following objects are masked from 'package:base':
##
## expand.grid, I, unname
## Loading required package: XML
## Loading required package: graph
##
## Attaching package: 'graph'
## The following object is masked from 'package:XML':
##
## addNode
library(viper)
library(AUCell)
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ dplyr 1.1.4 ✔ readr 2.1.5
## ✔ forcats 1.0.0 ✔ stringr 1.5.1
## ✔ ggplot2 3.5.1 ✔ tibble 3.2.1
## ✔ lubridate 1.9.3 ✔ tidyr 1.3.1
## ✔ purrr 1.0.2
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ lubridate::%within%() masks IRanges::%within%()
## ✖ stringr::boundary() masks graph::boundary()
## ✖ dplyr::collapse() masks IRanges::collapse()
## ✖ dplyr::combine() masks Biobase::combine(), BiocGenerics::combine()
## ✖ dplyr::desc() masks IRanges::desc()
## ✖ tidyr::expand() masks S4Vectors::expand()
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::first() masks S4Vectors::first()
## ✖ dplyr::lag() masks stats::lag()
## ✖ ggplot2::Position() masks BiocGenerics::Position(), base::Position()
## ✖ purrr::reduce() masks IRanges::reduce()
## ✖ dplyr::rename() masks S4Vectors::rename()
## ✖ lubridate::second() masks S4Vectors::second()
## ✖ lubridate::second<-() masks S4Vectors::second<-()
## ✖ dplyr::select() masks AnnotationDbi::select()
## ✖ dplyr::slice() masks IRanges::slice()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(dyngen)
#' Run the R code contained in a knitr/R Markdown document.
#'
#' Extracts the code chunks of `x` with `knitr::purl()` into a temporary
#' script, sources that script, and removes the temporary file afterwards.
#' Uses `knitr::purl()` instead of attaching knitr via `library()`, so the
#' search path is left untouched.
#'
#' @param x   Path to the document (e.g. an .Rmd file) to execute.
#' @param ... Further arguments forwarded to `source()` (e.g. `echo = TRUE`).
#' @return The result of `source()`.
ksource <- function(x, ...) {
  scriptFile <- tempfile(fileext = ".R")
  # Clean up the extracted script even if source() errors out.
  on.exit(unlink(scriptFile), add = TRUE)
  source(knitr::purl(x, output = scriptFile), ...)
}
# install.packages("/domino/edv/chbv-bm-single-cell/KVande/24/transfactorPaper/transfactor/", repos = NULL)
library(transfactor)
## Loading required package: SingleCellExperiment
## Loading required package: SummarizedExperiment
## Loading required package: MatrixGenerics
## Loading required package: matrixStats
##
## Attaching package: 'matrixStats'
##
## The following object is masked from 'package:dplyr':
##
## count
##
## The following objects are masked from 'package:Biobase':
##
## anyMissing, rowMedians
##
##
## Attaching package: 'MatrixGenerics'
##
## The following objects are masked from 'package:matrixStats':
##
## colAlls, colAnyNAs, colAnys, colAvgsPerRowSet, colCollapse,
## colCounts, colCummaxs, colCummins, colCumprods, colCumsums,
## colDiffs, colIQRDiffs, colIQRs, colLogSumExps, colMadDiffs,
## colMads, colMaxs, colMeans2, colMedians, colMins, colOrderStats,
## colProds, colQuantiles, colRanges, colRanks, colSdDiffs, colSds,
## colSums2, colTabulates, colVarDiffs, colVars, colWeightedMads,
## colWeightedMeans, colWeightedMedians, colWeightedSds,
## colWeightedVars, rowAlls, rowAnyNAs, rowAnys, rowAvgsPerColSet,
## rowCollapse, rowCounts, rowCummaxs, rowCummins, rowCumprods,
## rowCumsums, rowDiffs, rowIQRDiffs, rowIQRs, rowLogSumExps,
## rowMadDiffs, rowMads, rowMaxs, rowMeans2, rowMedians, rowMins,
## rowOrderStats, rowProds, rowQuantiles, rowRanges, rowRanks,
## rowSdDiffs, rowSds, rowSums2, rowTabulates, rowVarDiffs, rowVars,
## rowWeightedMads, rowWeightedMeans, rowWeightedMedians,
## rowWeightedSds, rowWeightedVars
##
## The following object is masked from 'package:Biobase':
##
## rowMedians
##
## Loading required package: GenomicRanges
## Loading required package: GenomeInfoDb
## Loading required package: glmnet
## Loading required package: Matrix
##
## Attaching package: 'Matrix'
##
## The following objects are masked from 'package:tidyr':
##
## expand, pack, unpack
##
## The following object is masked from 'package:S4Vectors':
##
## expand
##
## Loaded glmnet 4.1-8
## Loading required package: fastmatch
##
## Attaching package: 'fastmatch'
##
## The following object is masked from 'package:dplyr':
##
## coalesce
##
## Loading required package: arrayhelpers
## Package arrayhelpers, version 1.1-0
##
## If you use this package please cite it appropriately.
## citation("arrayhelpers")
## will give you the correct reference.
##
## The project homepage is http://arrayhelpers.r-forge.r-project.org/
##
##
##
## Attaching package: 'arrayhelpers'
##
## The following object is masked from 'package:dplyr':
##
## slice
##
## The following object is masked from 'package:IRanges':
##
## slice
##
## Loading required package: edgeR
## Loading required package: limma
##
## Attaching package: 'limma'
##
## The following object is masked from 'package:BiocGenerics':
##
## plotMA
##
##
## Attaching package: 'edgeR'
##
## The following object is masked from 'package:SingleCellExperiment':
##
## cpm
# Load the evaluation helpers and the first simulated replicate.
source('../simulateFromModel/20201120_evaluateSimulatedDataset.R')
data <- readRDS("./datasets/simIters/dataset1.rds")
model <- data[[1]]
dataset <- data[[2]]
## plot
### Trajectory
# doesnt work: install.packages("/domino/edv/chbv-bm-single-cell/KVande/24/transfactorPaper/dynplot/", repos = NULL)
# devtools::install_github(repo="dynverse/dynfeature")
# devtools::install_github(repo="dynverse/dynplot")
library(dynplot)
wrappedDataset <- wrap_dataset(dataset)
# plots an MDS
# g1 <- dynplot::plot_dimred(wrappedDataset,
# label_milestones = TRUE,
# size_cells = 1,
# size_trajectory = 2)
### GRN: module-level backbone, TF-only network, and full TF + target network.
plotModuleNet <- plot_backbone_modulenet(dataset) +
  labs(title = "Backbone module reg. net.")
plotTfNet <- plot_feature_network(dataset, show_targets = FALSE) +
  labs(title = "TF reg. net.")
plotFullNet <- plot_feature_network(dataset) +
  labs(title = "TF + target reg. net.")
# Arrange the three network views in a 2x2 grid with panel tags A, B, C.
combinedPlot <- patchwork::wrap_plots(
  plotModuleNet, plotTfNet, plotFullNet,
  byrow = TRUE, ncol = 2, nrow = 2,
  widths = rep(1, 2), heights = rep(1, 2)
) +
  patchwork::plot_annotation(tag_levels = "A")
# ggsave("../plots/simDataDyngen1Plots.pdf", combinedPlot, width = 30, height = 15)
# Total expression (premature + mature mRNA), transposed to genes x cells.
countsAll <- t(as.matrix(dataset$experiment$counts_premrna + dataset$experiment$counts_mrna))
# only look at TFs
feature_info <- dataset$feature_info
tf_info <- feature_info %>% filter(is_tf)
feature_network <- dataset$feature_network %>% mutate(name = paste0(from, "->", to))
### get true GRN
tf <- as.character(unique(feature_network$from))
targets <- as.character(unique(feature_network$to))
## full GRN including repressions: targets x TFs matrix of signed effects.
XAll <- matrix(0, nrow = length(targets), ncol = length(tf),
               dimnames = list(targets, tf))
# Vectorized fill by (target, TF) dimnames; equivalent to looping over TFs
# (for duplicated edges the later row wins in both formulations).
XAll[cbind(as.character(feature_network$to),
           as.character(feature_network$from))] <- feature_network$effect
#sort(colSums(abs(XAll))) ; sort(table(feature_network$from)[colnames(XAll)]) #should be same
# Drop unconnected targets and TFs; drop = FALSE keeps XAll a matrix even if
# a single row/column were to remain.
XAll <- XAll[rowSums(abs(XAll)) > 0, , drop = FALSE]
XAll <- XAll[, colSums(abs(XAll)) > 0, drop = FALSE]
# remove TFs with only repressions (no activating edge at all)
tfsWithOnlyRepression <- apply(XAll, 2, function(x) {
  all(x == 0 | x == -1)
})
XAll <- XAll[, !tfsWithOnlyRepression, drop = FALSE]
# In- and out-degree distributions of the cleaned GRN.
barplot(table(rowSums(abs(XAll))))
barplot(table(colSums(abs(XAll))))
## alpha: prior edge-confidence matrix, same shape as XAll (targets x TFs).
# Start from edge presence (|effect|); activating edges get their simulated
# strength below.
alpha <- abs(XAll)
# (to, from) index pairs — column 2 then column 1 of feature_network —
# matching XAll's (row, column) dimname order.
id <- as.matrix(cbind(dataset$feature_network[,2], dataset$feature_network[,1]))
# Keep only activating edges whose endpoints survived the GRN filtering.
keepId <- id[,1] %in% rownames(XAll) & id[,2] %in% colnames(XAll) & dataset$feature_network$effect == 1
id <- id[keepId,]
# alpha[id] <- dataset$feature_network$strength[keepId]
### more discriminative prior by making this much larger
alpha[id] <- dataset$feature_network$strength[keepId] * 100
# truth: only TFs from burn-in or modules.
truth <- data.frame(de=rep(1, ncol(XAll)),
row.names=colnames(XAll))
# Target and housekeeping genes acting as TFs are labelled non-DE.
truth[grep(x=rownames(truth), pattern="Target|HK"),] <- 0
# Counts restricted to the genes present in the GRN.
countsTfTargets <- countsAll[rownames(XAll),]
# Corrupt the GRN: add noisy TFs and noisy edges (helper sourced above).
nNoiseEdges <- 500
nNoiseTFs <- 10
XNoisy <- addNoiseToGRN(X = XAll,
nNoiseTFs = nNoiseTFs,
nNoiseEdges = nNoiseEdges,
noiseProb = 3/nrow(XAll),
seed = 10)
# truth: the added noisy TFs are non-DE by construction.
truthNoisy <- rbind(truth, data.frame(de=rep(0, nNoiseTFs),
row.names=paste0("noisyTF",1:nNoiseTFs)))
## filter if necessary: drop genes/TFs left without any edge after noising.
keepGene <- rowSums(abs(XNoisy)) > 0
counts <- countsTfTargets[keepGene,]
curAlpha <- alpha[keepGene,]
XNoisy <- XNoisy[keepGene,]
keepTF <- colSums(abs(XNoisy)) > 0
truthNoisy <- truthNoisy[keepTF,,drop=FALSE]
# get true alpha for noisy GRN
# NOTE(review): assumes addNoiseToGRN() appends the noisy TFs as the last
# nNoiseTFs columns of XNoisy — confirm against the sourced helper.
if(nNoiseTFs > 0){
alphaTrueNoisy <- cbind(curAlpha, XNoisy[,(ncol(XAll)+1):(ncol(XAll)+nNoiseTFs)])
} else {
alphaTrueNoisy <- curAlpha
}
XNoisy <- XNoisy[,keepTF]
alphaTrueNoisy <- alphaTrueNoisy[,keepTF]
# Zero out prior mass on edges absent from the noisy GRN.
alphaTrueNoisy[XNoisy == 0] <- 0
rownames(truthNoisy) <- colnames(XNoisy)
# viper regulon
regulonNoisy <- constructViperRegulon(XNoisy, alphaTrueNoisy)
# AUCell genesets
genesetsNoisy <- constructGenesets(XNoisy, alphaTrueNoisy)
# Design matrix: one indicator column per 5%-quantile bin of simulation time.
qSteps <- 0.05
pt <- dataset$experiment$cell_info$sim_time
ptGroups <- Hmisc::cut2(pt, cuts = quantile(pt, prob=seq(0,1,by=qSteps)))
Xpt <- model.matrix(~0+ptGroups)
design <- Xpt
# Evaluate all methods on the noisy GRN.
# NOTE(review): passes the full countsAll rather than the filtered `counts`
# built above — confirm evaluateSimulation_repressions subsets rows internally.
res <- evaluateSimulation_repressions(counts = countsAll,
design = design,
X = XNoisy,
alpha = alphaTrueNoisy,
regulon = regulonNoisy,
genesets = genesetsNoisy,
truth = truthNoisy,
verbose = TRUE,
alphaScale = 1)
## iteration 1
## iteration 2. Log-lik: -1118332.78
## iteration 3. Log-lik: -1111375.805
## iteration 4. Log-lik: -1105109.174
## iteration 5. Log-lik: -1098903.067
## iteration 6. Log-lik: -1092249.927
## iteration 7. Log-lik: -1084999.145
## iteration 8. Log-lik: -1077585.362
## iteration 9. Log-lik: -1069942.632
## iteration 10. Log-lik: -1062369.082
## iteration 11. Log-lik: -1054467.262
## iteration 12. Log-lik: -1046600.973
## iteration 13. Log-lik: -1039261.547
## iteration 14. Log-lik: -1032549.811
## iteration 15. Log-lik: -1026212.513
## iteration 16. Log-lik: -1020756.618
## iteration 17. Log-lik: -1015497.53
## iteration 18. Log-lik: -1010500.715
## iteration 19. Log-lik: -1005697.356
## iteration 20. Log-lik: -1001286.796
## iteration 21. Log-lik: -997254.366
## iteration 22. Log-lik: -993373.825
## iteration 23. Log-lik: -989830.219
## iteration 24. Log-lik: -986500.189
## iteration 25. Log-lik: -983483.565
## iteration 26. Log-lik: -980774.789
## iteration 27. Log-lik: -978187.76
## iteration 28. Log-lik: -975983.299
## iteration 29. Log-lik: -973806.371
## iteration 30. Log-lik: -971939.502
## iteration 31. Log-lik: -970098.215
## iteration 32. Log-lik: -968401.764
## iteration 33. Log-lik: -966859.676
## iteration 34. Log-lik: -965579.414
## iteration 35. Log-lik: -964212.144
## iteration 36. Log-lik: -962957.955
## iteration 37. Log-lik: -961913.272
## iteration 38. Log-lik: -960887.287
## iteration 39. Log-lik: -959976.918
## iteration 40. Log-lik: -959089.697
## iteration 41. Log-lik: -958337.076
## iteration 42. Log-lik: -957596.399
## iteration 43. Log-lik: -956869.802
## iteration 44. Log-lik: -956079.376
## iteration 45. Log-lik: -955434.366
## iteration 46. Log-lik: -954872.012
## iteration 47. Log-lik: -954309.233
## iteration 48. Log-lik: -953725.803
## iteration 49. Log-lik: -953130.474
## iteration 50. Log-lik: -952636.974
## iteration 51. Log-lik: -952073.858
## iteration 52. Log-lik: -951568.599
## iteration 53. Log-lik: -951102.657
## iteration 54. Log-lik: -950613.825
## iteration 55. Log-lik: -950208.174
## iteration 56. Log-lik: -949791.741
## iteration 57. Log-lik: -949302.203
## iteration 58. Log-lik: -948863.807
## iteration 59. Log-lik: -948539.164
## iteration 60. Log-lik: -948060.502
## iteration 61. Log-lik: -947695.457
## iteration 62. Log-lik: -947323.977
## iteration 63. Log-lik: -946938.153
## iteration 64. Log-lik: -946591.721
## iteration 65. Log-lik: -946251.778
## iteration 66. Log-lik: -945906.116
## iteration 67. Log-lik: -945624.923
## iteration 68. Log-lik: -945310.694
## iteration 69. Log-lik: -944936.718
## iteration 70. Log-lik: -944599.983
## iteration 71. Log-lik: -944267.856
## iteration 72. Log-lik: -944035.041
## iteration 73. Log-lik: -943828.664
## iteration 74. Log-lik: -943572.246
## iteration 75. Log-lik: -943273.029
## iteration 76. Log-lik: -943016.287
## iteration 77. Log-lik: -942785.34
## iteration 78. Log-lik: -942560.71
## iteration 79. Log-lik: -942316.966
## iteration 80. Log-lik: -942131.043
## iteration 81. Log-lik: -941875.584
## iteration 82. Log-lik: -941588.892
## iteration 83. Log-lik: -941410.154
## iteration 84. Log-lik: -941227.522
## iteration 85. Log-lik: -941031.541
## iteration 86. Log-lik: -940869.27
## iteration 87. Log-lik: -940669.382
## iteration 88. Log-lik: -940409.448
## iteration 89. Log-lik: -940251.337
## iteration 90. Log-lik: -940062.582
## iteration 91. Log-lik: -939895.071
## iteration 92. Log-lik: -939738.546
## iteration 93. Log-lik: -939586.722
## iteration 94. Log-lik: -939429.251
## iteration 95. Log-lik: -939243.801
## iteration 96. Log-lik: -939075.132
## iteration 97. Log-lik: -938917.378
## iteration 98. Log-lik: -938786.871
## iteration 99. Log-lik: -938669.567
## iteration 100. Log-lik: -938515.217
## iteration 101. Log-lik: -938363.959
## iteration 102. Log-lik: -938236.218
## iteration 103. Log-lik: -938140.337
## iteration 104. Log-lik: -937994.637
## iteration 105. Log-lik: -937890.308
## iteration 106. Log-lik: -937779.007
## iteration 107. Log-lik: -937638.329
## iteration 108. Log-lik: -937543.749
## iteration 109. Log-lik: -937453.708
## iteration 110. Log-lik: -937366.12
## iteration 111. Log-lik: -937247.554
## iteration 112. Log-lik: -937120.586
## iteration 113. Log-lik: -937034.838
## iteration 114. Log-lik: -936948.68
## iteration 115. Log-lik: -936797.083
## iteration 116. Log-lik: -936679.039
## iteration 117. Log-lik: -936609.743
## iteration 118. Log-lik: -936522.429
## iteration 119. Log-lik: -936430.784
## iteration 120. Log-lik: -936360.922
## iteration 121. Log-lik: -936290.127
## iteration 122. Log-lik: -936217.909
## iteration 123. Log-lik: -936134.899
## iteration 124. Log-lik: -936072.543
## iteration 125. Log-lik: -935992.568
## iteration 126. Log-lik: -935914.894
## iteration 127. Log-lik: -935843.612
## iteration 128. Log-lik: -935710.904
## iteration 129. Log-lik: -935662.092
## iteration 130. Log-lik: -935562.356
## iteration 131. Log-lik: -935491.713
## iteration 132. Log-lik: -935410.231
## iteration 133. Log-lik: -935328.727
## iteration 134. Log-lik: -935264.1
## iteration 135. Log-lik: -935216.518
## iteration 136. Log-lik: -935152.007
## iteration 137. Log-lik: -935088.813
## iteration 138. Log-lik: -935023.801
## iteration 139. Log-lik: -934984.457
## iteration 140. Log-lik: -934902.415
## iteration 141. Log-lik: -934814.426
## iteration 142. Log-lik: -934754.963
## iteration 143. Log-lik: -934717.028
## iteration 144. Log-lik: -934646.452
## iteration 145. Log-lik: -934585.751
## iteration 146. Log-lik: -934532.76
## iteration 147. Log-lik: -934479.534
## iteration 148. Log-lik: -934425.833
## iteration 149. Log-lik: -934367.731
## iteration 150. Log-lik: -934287.404
## iteration 151. Log-lik: -934244.531
## iteration 152. Log-lik: -934190.485
## iteration 153. Log-lik: -934146.563
## iteration 154. Log-lik: -934098.321
## iteration 155. Log-lik: -934070.804
## iteration 156. Log-lik: -934036.969
## iteration 157. Log-lik: -933990.16
## iteration 158. Log-lik: -933893.925
## iteration 159. Log-lik: -933846.358
## iteration 160. Log-lik: -933812.53
## iteration 161. Log-lik: -933771.902
## iteration 162. Log-lik: -933736.99
## iteration 163. Log-lik: -933711.519
## iteration 164. Log-lik: -933667.798
## iteration 165. Log-lik: -933639.9
## iteration 166. Log-lik: -933591.304
## iteration 167. Log-lik: -933553.38
## iteration 168. Log-lik: -933525.81
## iteration 169. Log-lik: -933503.756
## iteration 170. Log-lik: -933482.593
## iteration 171. Log-lik: -933452.231
## iteration 172. Log-lik: -933432.831
## iteration 173. Log-lik: -933394.086
## iteration 174. Log-lik: -933358.955
## iteration 175. Log-lik: -933328.799
## iteration 176. Log-lik: -933299.188
## iteration 177. Log-lik: -933265.51
## iteration 178. Log-lik: -933241.685
## iteration 179. Log-lik: -933207.96
## iteration 180. Log-lik: -933178.428
## iteration 181. Log-lik: -933147.898
## iteration 182. Log-lik: -933115.512
## iteration 183. Log-lik: -933090.5
## iteration 184. Log-lik: -933061.505
## iteration 185. Log-lik: -933032.901
## iteration 186. Log-lik: -933003.976
## iteration 187. Log-lik: -932971.772
## iteration 188. Log-lik: -932929.086
## iteration 189. Log-lik: -932898.231
## iteration 190. Log-lik: -932885.328
## iteration 191. Log-lik: -932865.009
## iteration 192. Log-lik: -932863.458
## iteration 193. Log-lik: -932848.773
## iteration 194. Log-lik: -932808.924
## iteration 195. Log-lik: -932786.81
## iteration 196. Log-lik: -932746.253
## iteration 197. Log-lik: -932734.39
## iteration 198. Log-lik: -932713.678
## iteration 199. Log-lik: -932686.582
## iteration 200. Log-lik: -932681.345
## iteration 201. Log-lik: -932669.112
## iteration 202. Log-lik: -932630.794
## iteration 203. Log-lik: -932615.191
## iteration 204. Log-lik: -932571.662
## iteration 205. Log-lik: -932553.428
## iteration 206. Log-lik: -932520.762
## iteration 207. Log-lik: -932502.054
## iteration 208. Log-lik: -932470.672
## iteration 209. Log-lik: -932454.737
## iteration 210. Log-lik: -932435.654
## iteration 211. Log-lik: -932399.53
## iteration 212. Log-lik: -932389.603
## iteration 213. Log-lik: -932367.841
## iteration 214. Log-lik: -932356.365
## iteration 215. Log-lik: -932322.383
## iteration 216. Log-lik: -932322.484
## iteration 217. Log-lik: -932297.634
## iteration 218. Log-lik: -932281.582
## iteration 219. Log-lik: -932263.836
## iteration 220. Log-lik: -932264.678
## iteration 221. Log-lik: -932240.299
## iteration 222. Log-lik: -932228.522
## iteration 223. Log-lik: -932198.645
## iteration 224. Log-lik: -932183.214
## iteration 225. Log-lik: -932166.618
## iteration 226. Log-lik: -932135.68
## iteration 227. Log-lik: -932112.866
## iteration 228. Log-lik: -932108.075
## iteration 229. Log-lik: -932103.061
## iteration 230. Log-lik: -932071.639
## iteration 231. Log-lik: -932063.158
## iteration 232. Log-lik: -932038.099
## iteration 233. Log-lik: -932017.15
## iteration 234. Log-lik: -932000.352
## iteration 235. Log-lik: -931984.342
## iteration 236. Log-lik: -931956.627
## iteration 237. Log-lik: -931926.524
## iteration 238. Log-lik: -931917.442
## iteration 239. Log-lik: -931905.335
## iteration 240. Log-lik: -931906.742
## iteration 241. Log-lik: -931895.422
## iteration 242. Log-lik: -931881.54
## iteration 243. Log-lik: -931866.076
## iteration 244. Log-lik: -931845.13
## iteration 245. Log-lik: -931828.047
## iteration 246. Log-lik: -931815.582
## iteration 247. Log-lik: -931802.696
## iteration 248. Log-lik: -931785.525
## iteration 249. Log-lik: -931785.296
## iteration 250. Log-lik: -931769.921
## iteration 251. Log-lik: -931757.315
## iteration 252. Log-lik: -931745.634
## iteration 253. Log-lik: -931742.704
## iteration 254. Log-lik: -931730.394
## iteration 255. Log-lik: -931712.667
## iteration 256. Log-lik: -931708.545
## iteration 257. Log-lik: -931693.441
## iteration 258. Log-lik: -931678.397
## iteration 259. Log-lik: -931663.258
## iteration 260. Log-lik: -931654.449
## iteration 261. Log-lik: -931635.476
## iteration 262. Log-lik: -931616.611
## iteration 263. Log-lik: -931591.931
## iteration 264. Log-lik: -931578.443
## iteration 265. Log-lik: -931560.539
## iteration 266. Log-lik: -931548.272
## iteration 267. Log-lik: -931530.999
## iteration 268. Log-lik: -931510.819
## iteration 269. Log-lik: -931497.316
## iteration 270. Log-lik: -931486.102
## iteration 271. Log-lik: -931473.817
## iteration 272. Log-lik: -931472.921
## iteration 273. Log-lik: -931452.833
## iteration 274. Log-lik: -931440.21
## iteration 275. Log-lik: -931426.784
## iteration 276. Log-lik: -931420.769
## iteration 277. Log-lik: -931413.223
## iteration 278. Log-lik: -931404.443
## iteration 279. Log-lik: -931391.22
## iteration 280. Log-lik: -931385.06
## iteration 281. Log-lik: -931380.583
## iteration 282. Log-lik: -931371.374
## iteration 283. Log-lik: -931357.841
## iteration 284. Log-lik: -931356.883
## iteration 285. Log-lik: -931343.256
## iteration 286. Log-lik: -931330.227
## iteration 287. Log-lik: -931313.915
## iteration 288. Log-lik: -931312.774
## iteration 289. Log-lik: -931290.628
## iteration 290. Log-lik: -931293.502
## iteration 291. Log-lik: -931287.636
## iteration 292. Log-lik: -931270.292
## iteration 293. Log-lik: -931256.576
## iteration 294. Log-lik: -931250.508
## iteration 295. Log-lik: -931244.938
## iteration 296. Log-lik: -931243.705
## iteration 297. Log-lik: -931231.269
## iteration 298. Log-lik: -931214.14
## iteration 299. Log-lik: -931212.18
## iteration 300. Log-lik: -931206.613
## iteration 301. Log-lik: -931189.524
## iteration 302. Log-lik: -931182.188
## iteration 303. Log-lik: -931172.796
## iteration 304. Log-lik: -931166.858
## iteration 305. Log-lik: -931165.572
## iteration 306. Log-lik: -931156.189
## iteration 307. Log-lik: -931150.803
## iteration 308. Log-lik: -931146.826
## iteration 309. Log-lik: -931135.031
## iteration 310. Log-lik: -931120.523
## iteration 311. Log-lik: -931106.484
## iteration 312. Log-lik: -931105.179
## iteration 313. Log-lik: -931091.65
## iteration 314. Log-lik: -931070.028
## iteration 315. Log-lik: -931053.183
## iteration 316. Log-lik: -931047.683
## iteration 317. Log-lik: -931042.298
## iteration 318. Log-lik: -931034.999
## iteration 319. Log-lik: -931029.56
## iteration 320. Log-lik: -931011.732
## iteration 321. Log-lik: -931001.231
## iteration 322. Log-lik: -931003.68
## iteration 323. Log-lik: -930996.908
## iteration 324. Log-lik: -930996.669
## iteration 325. Log-lik: -930980.025
## iteration 326. Log-lik: -930978.539
## iteration 327. Log-lik: -930969.719
## iteration 328. Log-lik: -930959.67
## iteration 329. Log-lik: -930958.142
## iteration 330. Log-lik: -930948.332
## iteration 331. Log-lik: -930941.588
## iteration 332. Log-lik: -930939.438
## iteration 333. Log-lik: -930918.637
## iteration 334. Log-lik: -930920.34
## iteration 335. Log-lik: -930914.151
## iteration 336. Log-lik: -930905.128
## iteration 337. Log-lik: -930886.906
## iteration 338. Log-lik: -930880.98
## iteration 339. Log-lik: -930863.402
## iteration 340. Log-lik: -930849.053
## iteration 341. Log-lik: -930843.606
## iteration 342. Log-lik: -930841.648
## iteration 343. Log-lik: -930837.103
## iteration 344. Log-lik: -930831.366
## iteration 345. Log-lik: -930829.121
## iteration 346. Log-lik: -930815.59
## iteration 347. Log-lik: -930801.805
## iteration 348. Log-lik: -930796.082
## iteration 349. Log-lik: -930793.67
## iteration 350. Log-lik: -930787.764
## iteration 351. Log-lik: -930785.923
## iteration 352. Log-lik: -930779.393
## iteration 353. Log-lik: -930754.488
## iteration 354. Log-lik: -930747.782
## iteration 355. Log-lik: -930749.225
## iteration 356. Log-lik: -930743.451
## iteration 357. Log-lik: -930737.455
## iteration 358. Log-lik: -930736.864
## iteration 359. Log-lik: -930731.029
## iteration 360. Log-lik: -930721.859
## iteration 361. Log-lik: -930715.881
## iteration 362. Log-lik: -930709.858
## iteration 363. Log-lik: -930691.96
## iteration 364. Log-lik: -930677.594
## iteration 365. Log-lik: -930679.598
## iteration 366. Log-lik: -930669.52
## iteration 367. Log-lik: -930666.954
## iteration 368. Log-lik: -930652.728
## iteration 369. Log-lik: -930646.444
## iteration 370. Log-lik: -930629.547
## iteration 371. Log-lik: -930627.482
## iteration 372. Log-lik: -930618.444
## iteration 373. Log-lik: -930620.377
## iteration 374. Log-lik: -930622.307
## iteration 375. Log-lik: -930615.75
## iteration 376. Log-lik: -930609.727
## iteration 377. Log-lik: -930607.631
## iteration 378. Log-lik: -930601.564
## iteration 379. Log-lik: -930599.446
## iteration 380. Log-lik: -930588.52
## iteration 381. Log-lik: -930578.378
## iteration 382. Log-lik: -930572.021
## iteration 383. Log-lik: -930566.054
## iteration 384. Log-lik: -930555.358
## iteration 385. Log-lik: -930552.906
## iteration 386. Log-lik: -930550.894
## iteration 387. Log-lik: -930528.622
## iteration 388. Log-lik: -930510.762
## iteration 389. Log-lik: -930508.239
## iteration 390. Log-lik: -930493.463
## iteration 391. Log-lik: -930495.208
## iteration 392. Log-lik: -930496.949
## iteration 393. Log-lik: -930494.644
## iteration 394. Log-lik: -930487.874
## iteration 395. Log-lik: -930477.58
## iteration 396. Log-lik: -930466.784
## iteration 397. Log-lik: -930464.494
## iteration 398. Log-lik: -930462.178
## iteration 399. Log-lik: -930460.016
## iteration 400. Log-lik: -930461.68
## iteration 401. Log-lik: -930455.282
## iteration 402. Log-lik: -930446.31
## iteration 403. Log-lik: -930443.996
## iteration 404. Log-lik: -930449.679
## iteration 405. Log-lik: -930438.558
## iteration 406. Log-lik: -930427.836
## iteration 407. Log-lik: -930420.969
## iteration 408. Log-lik: -930409.905
## iteration 409. Log-lik: -930398.982
## iteration 410. Log-lik: -930388.551
## iteration 411. Log-lik: -930390.118
## iteration 412. Log-lik: -930379.703
## iteration 413. Log-lik: -930373.215
## iteration 414. Log-lik: -930374.748
## iteration 415. Log-lik: -930368.33
## iteration 416. Log-lik: -930365.866
## iteration 417. Log-lik: -930367.383
## iteration 418. Log-lik: -930360.938
## iteration 419. Log-lik: -930349.797
## iteration 420. Log-lik: -930343.225
## iteration 421. Log-lik: -930336.766
## iteration 422. Log-lik: -930334.185
## iteration 423. Log-lik: -930332.284
## iteration 424. Log-lik: -930323.763
## iteration 425. Log-lik: -930329.195
## iteration 426. Log-lik: -930326.668
## iteration 427. Log-lik: -930324.137
## iteration 428. Log-lik: -930321.559
## iteration 429. Log-lik: -930318.381
## iteration 430. Log-lik: -930307.659
## iteration 431. Log-lik: -930301.032
## iteration 432. Log-lik: -930298.478
## iteration 433. Log-lik: -930296.05
## iteration 434. Log-lik: -930293.244
## iteration 435. Log-lik: -930290.656
## iteration 436. Log-lik: -930288.068
## iteration 437. Log-lik: -930289.447
## iteration 438. Log-lik: -930278.303
## iteration 439. Log-lik: -930279.664
## iteration 440. Log-lik: -930264.385
## iteration 441. Log-lik: -930257.061
## iteration 442. Log-lik: -930245.563
## iteration 443. Log-lik: -930246.87
## iteration 444. Log-lik: -930239.895
## iteration 445. Log-lik: -930241.19
## iteration 446. Log-lik: -930234.44
## iteration 447. Log-lik: -930227.682
## iteration 448. Log-lik: -930228.961
## iteration 449. Log-lik: -930219.45
## iteration 450. Log-lik: -930216.481
## iteration 451. Log-lik: -930213.795
## iteration 452. Log-lik: -930215.062
## iteration 453. Log-lik: -930204.172
## iteration 454. Log-lik: -930205.429
## iteration 455. Log-lik: -930194.622
## iteration 456. Log-lik: -930187.703
## iteration 457. Log-lik: -930188.938
## iteration 458. Log-lik: -930190.171
## iteration 459. Log-lik: -930191.402
## iteration 460. Log-lik: -930188.585
## iteration 461. Log-lik: -930177.275
## iteration 462. Log-lik: -930170.416
## iteration 463. Log-lik: -930163.436
## iteration 464. Log-lik: -930156.627
## iteration 465. Log-lik: -930157.807
## iteration 466. Log-lik: -930158.986
## iteration 467. Log-lik: -930160.163
## iteration 468. Log-lik: -930157.291
## iteration 469. Log-lik: -930154.502
## iteration 470. Log-lik: -930150.747
## iteration 471. Log-lik: -930139.821
## iteration 472. Log-lik: -930129.061
## iteration 473. Log-lik: -930122.149
## iteration 474. Log-lik: -930119.091
## iteration 475. Log-lik: -930116.185
## iteration 476. Log-lik: -930109.331
## iteration 477. Log-lik: -930094.387
## iteration 478. Log-lik: -930091.398
## iteration 479. Log-lik: -930076.242
## iteration 480. Log-lik: -930073.598
## iteration 481. Log-lik: -930074.669
## iteration 482. Log-lik: -930063.813
## iteration 483. Log-lik: -930064.889
## iteration 484. Log-lik: -930057.375
## iteration 485. Log-lik: -930054.449
## iteration 486. Log-lik: -930055.505
## iteration 487. Log-lik: -930056.559
## iteration 488. Log-lik: -930057.611
## iteration 489. Log-lik: -930058.661
## iteration 490. Log-lik: -930059.71
## iteration 491. Log-lik: -930058.075
## iteration 492. Log-lik: -930059.146
## iteration 493. Log-lik: -930056.229
## iteration 494. Log-lik: -930053.212
## iteration 495. Log-lik: -930054.275
## iteration 496. Log-lik: -930051.175
## iteration 497. Log-lik: -930044.187
## iteration 498. Log-lik: -930041.215
## iteration 499. Log-lik: -930042.257
## iteration 500. Log-lik: -930043.296
##
## Attaching package: 'glmGamPoi'
## The following object is masked from 'package:dplyr':
##
## vars
## The following object is masked from 'package:ggplot2':
##
## vars
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## iteration 1
## iteration 2. Log-lik: -1118881.43
## iteration 3. Log-lik: -1112969.105
## iteration 4. Log-lik: -1109529.513
## iteration 5. Log-lik: -1107449.606
## iteration 6. Log-lik: -1106231.333
## iteration 7. Log-lik: -1104621.289
## iteration 8. Log-lik: -1103225.688
## iteration 9. Log-lik: -1102157.067
## iteration 10. Log-lik: -1101183.329
## iteration 11. Log-lik: -1100274.405
## iteration 12. Log-lik: -1099573.962
## iteration 13. Log-lik: -1098964.3
## iteration 14. Log-lik: -1098480.235
## iteration 15. Log-lik: -1098145.371
## iteration 16. Log-lik: -1097885.669
## iteration 17. Log-lik: -1097719.152
## iteration 18. Log-lik: -1097514.558
## iteration 19. Log-lik: -1097413.015
## iteration 20. Log-lik: -1097332.594
## iteration 21. Log-lik: -1097275.227
## iteration 22. Log-lik: -1097230.693
## iteration 23. Log-lik: -1097185.722
## iteration 24. Log-lik: -1097131.897
## iteration 25. Log-lik: -1097119.984
## iteration 26. Log-lik: -1097102.23
## iteration 27. Log-lik: -1097073.158
## iteration 28. Log-lik: -1097053.252
## iteration 29. Log-lik: -1097039.013
## iteration 30. Log-lik: -1097032.445
## iteration 31. Log-lik: -1097030.319
## iteration 32. Log-lik: -1097020.879
## iteration 33. Log-lik: -1097017.23
## iteration 34. Log-lik: -1097007.022
## iteration 35. Log-lik: -1096998.7
## iteration 36. Log-lik: -1096995.05
## iteration 37. Log-lik: -1096991.146
## iteration 38. Log-lik: -1096984.737
## iteration 39. Log-lik: -1096980.806
## iteration 40. Log-lik: -1096977.393
## iteration 41. Log-lik: -1096973.053
## iteration 42. Log-lik: -1096973.523
## iteration 43. Log-lik: -1096971.259
## iteration 44. Log-lik: -1096971.64
## iteration 45. Log-lik: -1096971.981
## iteration 46. Log-lik: -1096972.288
## iteration 47. Log-lik: -1096972.564
## iteration 48. Log-lik: -1096972.815
## iteration 49. Log-lik: -1096973.042
## iteration 50. Log-lik: -1096968.414
## iteration 51. Log-lik: -1096968.594
## iteration 52. Log-lik: -1096968.759
## iteration 53. Log-lik: -1096968.91
## iteration 54. Log-lik: -1096969.049
## iteration 55. Log-lik: -1096969.178
## iteration 56. Log-lik: -1096964.412
## iteration 57. Log-lik: -1096964.518
## iteration 58. Log-lik: -1096959.632
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## iteration 1
## iteration 2. Log-lik: -1118332.78
## iteration 3. Log-lik: -1110273.54
## iteration 4. Log-lik: -1104500.153
## iteration 5. Log-lik: -1098838.071
## iteration 6. Log-lik: -1092919.958
## iteration 7. Log-lik: -1086686.782
## iteration 8. Log-lik: -1079995.127
## iteration 9. Log-lik: -1073236.24
## iteration 10. Log-lik: -1066497.311
## iteration 11. Log-lik: -1059388.183
## iteration 12. Log-lik: -1052478.778
## iteration 13. Log-lik: -1046010.496
## iteration 14. Log-lik: -1040256.063
## iteration 15. Log-lik: -1034883.317
## iteration 16. Log-lik: -1029924.161
## iteration 17. Log-lik: -1025098.42
## iteration 18. Log-lik: -1020314.858
## iteration 19. Log-lik: -1015714.978
## iteration 20. Log-lik: -1011641.138
## iteration 21. Log-lik: -1007371.466
## iteration 22. Log-lik: -1003373.162
## iteration 23. Log-lik: -999809.533
## iteration 24. Log-lik: -996386.621
## iteration 25. Log-lik: -993232.512
## iteration 26. Log-lik: -990261.197
## iteration 27. Log-lik: -987792.688
## iteration 28. Log-lik: -985147.278
## iteration 29. Log-lik: -982934.773
## iteration 30. Log-lik: -981073.891
## iteration 31. Log-lik: -979505.485
## iteration 32. Log-lik: -977696.431
## iteration 33. Log-lik: -976011.623
## iteration 34. Log-lik: -974598.578
## iteration 35. Log-lik: -973200.69
## iteration 36. Log-lik: -971949.77
## iteration 37. Log-lik: -970939.687
## iteration 38. Log-lik: -969959.597
## iteration 39. Log-lik: -969000.708
## iteration 40. Log-lik: -968123.98
## iteration 41. Log-lik: -967356.942
## iteration 42. Log-lik: -966078.124
## iteration 43. Log-lik: -964862.535
## iteration 44. Log-lik: -963711.777
## iteration 45. Log-lik: -962672.63
## iteration 46. Log-lik: -961731.112
## iteration 47. Log-lik: -960859.886
## iteration 48. Log-lik: -960133.336
## iteration 49. Log-lik: -959409.662
## iteration 50. Log-lik: -958682.837
## iteration 51. Log-lik: -957965.806
## iteration 52. Log-lik: -957248.914
## iteration 53. Log-lik: -956619.231
## iteration 54. Log-lik: -956052.908
## iteration 55. Log-lik: -955599.453
## iteration 56. Log-lik: -955083.977
## iteration 57. Log-lik: -954627.23
## iteration 58. Log-lik: -954161.823
## iteration 59. Log-lik: -953744.345
## iteration 60. Log-lik: -953387.151
## iteration 61. Log-lik: -952976.341
## iteration 62. Log-lik: -952582.562
## iteration 63. Log-lik: -952234.064
## iteration 64. Log-lik: -951867.48
## iteration 65. Log-lik: -951440.043
## iteration 66. Log-lik: -951036.944
## iteration 67. Log-lik: -950680.374
## iteration 68. Log-lik: -950288.856
## iteration 69. Log-lik: -949976.922
## iteration 70. Log-lik: -949763.65
## iteration 71. Log-lik: -949407.016
## iteration 72. Log-lik: -949134.478
## iteration 73. Log-lik: -948883.225
## iteration 74. Log-lik: -948592.724
## iteration 75. Log-lik: -948273.833
## iteration 76. Log-lik: -948042.708
## iteration 77. Log-lik: -947858.083
## iteration 78. Log-lik: -947669.014
## iteration 79. Log-lik: -947510.6
## iteration 80. Log-lik: -947294.621
## iteration 81. Log-lik: -947128.148
## iteration 82. Log-lik: -946844.602
## iteration 83. Log-lik: -946638.009
## iteration 84. Log-lik: -946481.273
## iteration 85. Log-lik: -946374.962
## iteration 86. Log-lik: -946203.822
## iteration 87. Log-lik: -945941.966
## iteration 88. Log-lik: -945693.498
## iteration 89. Log-lik: -945538.225
## iteration 90. Log-lik: -945359.444
## iteration 91. Log-lik: -945162.628
## iteration 92. Log-lik: -945091.062
## iteration 93. Log-lik: -944934.746
## iteration 94. Log-lik: -944778.641
## iteration 95. Log-lik: -944553.955
## iteration 96. Log-lik: -944349.574
## iteration 97. Log-lik: -944224.468
## iteration 98. Log-lik: -944155.892
## iteration 99. Log-lik: -944105.564
## iteration 100. Log-lik: -944012.94
## iteration 101. Log-lik: -943856.387
## iteration 102. Log-lik: -943707.669
## iteration 103. Log-lik: -943570.983
## iteration 104. Log-lik: -943372.476
## iteration 105. Log-lik: -943292.653
## iteration 106. Log-lik: -943245.45
## iteration 107. Log-lik: -943154.088
## iteration 108. Log-lik: -943063.635
## iteration 109. Log-lik: -942963.459
## iteration 110. Log-lik: -942861.665
## iteration 111. Log-lik: -942744.67
## iteration 112. Log-lik: -942559.463
## iteration 113. Log-lik: -942379.798
## iteration 114. Log-lik: -942147.745
## iteration 115. Log-lik: -942030.732
## iteration 116. Log-lik: -941885.589
## iteration 117. Log-lik: -941730.436
## iteration 118. Log-lik: -941641.71
## iteration 119. Log-lik: -941536.445
## iteration 120. Log-lik: -941221.725
## iteration 121. Log-lik: -941086.733
## iteration 122. Log-lik: -940993.628
## iteration 123. Log-lik: -940957.033
## iteration 124. Log-lik: -940742.331
## iteration 125. Log-lik: -940664.633
## iteration 126. Log-lik: -940560.455
## iteration 127. Log-lik: -940520.813
## iteration 128. Log-lik: -940436.125
## iteration 129. Log-lik: -940355.46
## iteration 130. Log-lik: -940309.079
## iteration 131. Log-lik: -940201.602
## iteration 132. Log-lik: -940211.321
## iteration 133. Log-lik: -940163.656
## iteration 134. Log-lik: -940050.801
## iteration 135. Log-lik: -939994.902
## iteration 136. Log-lik: -939910.999
## iteration 137. Log-lik: -939834.827
## iteration 138. Log-lik: -939665.672
## iteration 139. Log-lik: -939601.562
## iteration 140. Log-lik: -939541.223
## iteration 141. Log-lik: -939468.237
## iteration 142. Log-lik: -939432.935
## iteration 143. Log-lik: -939391.715
## iteration 144. Log-lik: -939335.789
## iteration 145. Log-lik: -939233.748
## iteration 146. Log-lik: -939137.312
## iteration 147. Log-lik: -938943.344
## iteration 148. Log-lik: -938838.996
## iteration 149. Log-lik: -938700.46
## iteration 150. Log-lik: -938640.091
## iteration 151. Log-lik: -938602.012
## iteration 152. Log-lik: -938595.962
## iteration 153. Log-lik: -938509.13
## iteration 154. Log-lik: -938475.454
## iteration 155. Log-lik: -938347.454
## iteration 156. Log-lik: -938312.605
## iteration 157. Log-lik: -938130.39
## iteration 158. Log-lik: -938094.543
## iteration 159. Log-lik: -938066.385
## iteration 160. Log-lik: -938028.08
## iteration 161. Log-lik: -937850.161
## iteration 162. Log-lik: -937853.765
## iteration 163. Log-lik: -937797.652
## iteration 164. Log-lik: -937771.455
## iteration 165. Log-lik: -937756.575
## iteration 166. Log-lik: -937740.351
## iteration 167. Log-lik: -937662.349
## iteration 168. Log-lik: -937522.605
## iteration 169. Log-lik: -937466.07
## iteration 170. Log-lik: -937421.738
## iteration 171. Log-lik: -937388.885
## iteration 172. Log-lik: -937483.281
## iteration 173. Log-lik: -937460.6
## iteration 174. Log-lik: -937426.428
## iteration 175. Log-lik: -937406.101
## iteration 176. Log-lik: -937385.334
## iteration 177. Log-lik: -937376.561
## iteration 178. Log-lik: -937318.962
## iteration 179. Log-lik: -937204.977
## iteration 180. Log-lik: -937190.111
## iteration 181. Log-lik: -937191.234
## iteration 182. Log-lik: -937137.549
## iteration 183. Log-lik: -937147.066
## iteration 184. Log-lik: -937130.352
## iteration 185. Log-lik: -937114.47
## iteration 186. Log-lik: -937048.459
## iteration 187. Log-lik: -937055.097
## iteration 188. Log-lik: -936946.077
## iteration 189. Log-lik: -936910.915
## iteration 190. Log-lik: -936877.76
## iteration 191. Log-lik: -936858.652
## iteration 192. Log-lik: -936856.196
## iteration 193. Log-lik: -936701.767
## iteration 194. Log-lik: -936693.774
## iteration 195. Log-lik: -936722.681
## iteration 196. Log-lik: -936750.538
## iteration 197. Log-lik: -936736.591
## iteration 198. Log-lik: -936586.889
## iteration 199. Log-lik: -936574.533
## iteration 200. Log-lik: -936530.678
## iteration 201. Log-lik: -936463.439
## iteration 202. Log-lik: -936492.045
## iteration 203. Log-lik: -936527.295
## iteration 204. Log-lik: -936560.339
## iteration 205. Log-lik: -936574.197
## iteration 206. Log-lik: -936540.338
## iteration 207. Log-lik: -936498.854
## iteration 208. Log-lik: -936368.535
## iteration 209. Log-lik: -936361.126
## iteration 210. Log-lik: -936346.052
## iteration 211. Log-lik: -936382.997
## iteration 212. Log-lik: -936401.486
## iteration 213. Log-lik: -936443.168
## iteration 214. Log-lik: -936382.178
## iteration 215. Log-lik: -936387.888
## iteration 216. Log-lik: -936347.09
## iteration 217. Log-lik: -936230.798
## iteration 218. Log-lik: -936305.016
## iteration 219. Log-lik: -936191.786
## iteration 220. Log-lik: -936172.105
## iteration 221. Log-lik: -936177.01
## iteration 222. Log-lik: -936180.013
## iteration 223. Log-lik: -936171.848
## iteration 224. Log-lik: -936221.414
## iteration 225. Log-lik: -936077.232
## iteration 226. Log-lik: -935982.539
## iteration 227. Log-lik: -935921.519
## iteration 228. Log-lik: -935883.076
## iteration 229. Log-lik: -935895.589
## iteration 230. Log-lik: -935938.273
## iteration 231. Log-lik: -935925.105
## iteration 232. Log-lik: -935931.846
## iteration 233. Log-lik: -935930.05
## iteration 234. Log-lik: -935848.33
## iteration 235. Log-lik: -935891.171
## iteration 236. Log-lik: -935917.264
## iteration 237. Log-lik: -935894.299
## iteration 238. Log-lik: -935890.73
## iteration 239. Log-lik: -935822.991
## iteration 240. Log-lik: -935820.026
## iteration 241. Log-lik: -935792.383
## iteration 242. Log-lik: -935793.561
## iteration 243. Log-lik: -935824.861
## iteration 244. Log-lik: -935741.395
## iteration 245. Log-lik: -935785.1
## iteration 246. Log-lik: -935776.175
## iteration 247. Log-lik: -935771.39
## iteration 248. Log-lik: -935719.999
## iteration 249. Log-lik: -935661.428
## iteration 250. Log-lik: -935698.617
## iteration 251. Log-lik: -935714.902
## iteration 252. Log-lik: -935704.144
## iteration 253. Log-lik: -935720.734
## iteration 254. Log-lik: -935642.143
## iteration 255. Log-lik: -935601.946
## iteration 256. Log-lik: -935613.963
## iteration 257. Log-lik: -935662.542
## iteration 258. Log-lik: -935599.27
## iteration 259. Log-lik: -935543.117
## iteration 260. Log-lik: -935531.083
## iteration 261. Log-lik: -935517.646
## iteration 262. Log-lik: -935609.181
## iteration 263. Log-lik: -935657.266
## iteration 264. Log-lik: -935650.221
## iteration 265. Log-lik: -935635.4
## iteration 266. Log-lik: -935639.607
## iteration 267. Log-lik: -935499.508
## iteration 268. Log-lik: -935467.963
## iteration 269. Log-lik: -935437.544
## iteration 270. Log-lik: -935419.603
## iteration 271. Log-lik: -935448.785
## iteration 272. Log-lik: -935503.45
## iteration 273. Log-lik: -935517.799
## iteration 274. Log-lik: -935428.945
## iteration 275. Log-lik: -935383.19
## iteration 276. Log-lik: -935351.095
## iteration 277. Log-lik: -935269.114
## iteration 278. Log-lik: -935252.586
## iteration 279. Log-lik: -935287.654
## iteration 280. Log-lik: -935317.008
## iteration 281. Log-lik: -935308.195
## iteration 282. Log-lik: -935309.381
## iteration 283. Log-lik: -935354.46
## iteration 284. Log-lik: -935268.678
## iteration 285. Log-lik: -935237.508
## iteration 286. Log-lik: -935246.959
## iteration 287. Log-lik: -935285.858
## iteration 288. Log-lik: -935220.149
## iteration 289. Log-lik: -935240.335
## iteration 290. Log-lik: -935306.934
## iteration 291. Log-lik: -935303.713
## iteration 292. Log-lik: -935335.026
## iteration 293. Log-lik: -935259.733
## iteration 294. Log-lik: -935195.138
## iteration 295. Log-lik: -935199.826
## iteration 296. Log-lik: -935188.279
## iteration 297. Log-lik: -935109.68
## iteration 298. Log-lik: -935060.4
## iteration 299. Log-lik: -935048.152
## iteration 300. Log-lik: -935112.638
## iteration 301. Log-lik: -935127.629
## iteration 302. Log-lik: -935165.836
## iteration 303. Log-lik: -935212.82
## iteration 304. Log-lik: -935123.906
## iteration 305. Log-lik: -935087.32
## iteration 306. Log-lik: -935122.51
## iteration 307. Log-lik: -935121.419
## iteration 308. Log-lik: -935187.083
## iteration 309. Log-lik: -935197.944
## iteration 310. Log-lik: -935200.966
## iteration 311. Log-lik: -935161.29
## iteration 312. Log-lik: -935233.047
## iteration 313. Log-lik: -935238.139
## iteration 314. Log-lik: -935230.48
## iteration 315. Log-lik: -935176.007
## iteration 316. Log-lik: -935139.903
## iteration 317. Log-lik: -935041.359
## iteration 318. Log-lik: -935018.721
## iteration 319. Log-lik: -934988.61
## iteration 320. Log-lik: -934948.415
## iteration 321. Log-lik: -934952.182
## iteration 322. Log-lik: -934926.03
## iteration 323. Log-lik: -934895.087
## iteration 324. Log-lik: -934702.492
## iteration 325. Log-lik: -934622.101
## iteration 326. Log-lik: -934477.138
## iteration 327. Log-lik: -934217.215
## iteration 328. Log-lik: -934145.416
## iteration 329. Log-lik: -934091.981
## iteration 330. Log-lik: -934026.526
## iteration 331. Log-lik: -934018.527
## iteration 332. Log-lik: -933908.481
## iteration 333. Log-lik: -933859.9
## iteration 334. Log-lik: -933847.03
## iteration 335. Log-lik: -933857.403
## iteration 336. Log-lik: -933819.798
## iteration 337. Log-lik: -933760.8
## iteration 338. Log-lik: -933712.979
## iteration 339. Log-lik: -933663.165
## iteration 340. Log-lik: -933663.458
## iteration 341. Log-lik: -933630.318
## iteration 342. Log-lik: -933632.823
## iteration 343. Log-lik: -933583.228
## iteration 344. Log-lik: -933613.738
## iteration 345. Log-lik: -933599.891
## iteration 346. Log-lik: -933602.203
## iteration 347. Log-lik: -933581.765
## iteration 348. Log-lik: -933567.417
## iteration 349. Log-lik: -933561.706
## iteration 350. Log-lik: -933481.464
## iteration 351. Log-lik: -933514.921
## iteration 352. Log-lik: -933559.602
## iteration 353. Log-lik: -933536.978
## iteration 354. Log-lik: -933511.164
## iteration 355. Log-lik: -933609.071
## iteration 356. Log-lik: -933655.264
## iteration 357. Log-lik: -933631.836
## iteration 358. Log-lik: -933582.308
## iteration 359. Log-lik: -933568.059
## iteration 360. Log-lik: -933595.482
## iteration 361. Log-lik: -933642.35
## iteration 362. Log-lik: -933651.097
## iteration 363. Log-lik: -933540.423
## iteration 364. Log-lik: -933453.168
## iteration 365. Log-lik: -933437.687
## iteration 366. Log-lik: -933382.479
## iteration 367. Log-lik: -933390.347
## iteration 368. Log-lik: -933342.038
## iteration 369. Log-lik: -933390.459
## iteration 370. Log-lik: -933438.814
## iteration 371. Log-lik: -933405.701
## iteration 372. Log-lik: -933448.7
## iteration 373. Log-lik: -933348.882
## iteration 374. Log-lik: -933353.919
## iteration 375. Log-lik: -933367.356
## iteration 376. Log-lik: -933368.624
## iteration 377. Log-lik: -933412.473
## iteration 378. Log-lik: -933383.628
## iteration 379. Log-lik: -933359.885
## iteration 380. Log-lik: -933378.373
## iteration 381. Log-lik: -933345.12
## iteration 382. Log-lik: -933386.738
## iteration 383. Log-lik: -933334.205
## iteration 384. Log-lik: -933375.303
## iteration 385. Log-lik: -933394.444
## iteration 386. Log-lik: -933438.195
## iteration 387. Log-lik: -933420.829
## iteration 388. Log-lik: -933341.798
## iteration 389. Log-lik: -933264.619
## iteration 390. Log-lik: -933227.492
## iteration 391. Log-lik: -933243.605
## iteration 392. Log-lik: -933227.804
## iteration 393. Log-lik: -933194.012
## iteration 394. Log-lik: -933223.461
## iteration 395. Log-lik: -933195.995
## iteration 396. Log-lik: -933237.744
## iteration 397. Log-lik: -933183.139
## iteration 398. Log-lik: -933196.038
## iteration 399. Log-lik: -933189.42
## iteration 400. Log-lik: -933250.587
## iteration 401. Log-lik: -933254.431
## iteration 402. Log-lik: -933297.334
## iteration 403. Log-lik: -933232.277
## iteration 404. Log-lik: -933213.898
## iteration 405. Log-lik: -933217.412
## iteration 406. Log-lik: -933200.101
## iteration 407. Log-lik: -933243.953
## iteration 408. Log-lik: -933299.396
## iteration 409. Log-lik: -933222.431
## iteration 410. Log-lik: -933227.267
## iteration 411. Log-lik: -933288.707
## iteration 412. Log-lik: -933210.883
## iteration 413. Log-lik: -933170.831
## iteration 414. Log-lik: -933199.697
## iteration 415. Log-lik: -933227.917
## iteration 416. Log-lik: -933222.418
## iteration 417. Log-lik: -933208.109
## iteration 418. Log-lik: -933210.051
## iteration 419. Log-lik: -933138.822
## iteration 420. Log-lik: -933157.756
## iteration 421. Log-lik: -933097.911
## iteration 422. Log-lik: -933030.014
## iteration 423. Log-lik: -933056.251
## iteration 424. Log-lik: -933096.348
## iteration 425. Log-lik: -933160.046
## iteration 426. Log-lik: -933055.331
## iteration 427. Log-lik: -933037.038
## iteration 428. Log-lik: -933029.981
## iteration 429. Log-lik: -932960.309
## iteration 430. Log-lik: -932936.962
## iteration 431. Log-lik: -932994.326
## iteration 432. Log-lik: -933063.939
## iteration 433. Log-lik: -933022.59
## iteration 434. Log-lik: -933018.954
## iteration 435. Log-lik: -933084.196
## iteration 436. Log-lik: -933051.104
## iteration 437. Log-lik: -933036.288
## iteration 438. Log-lik: -933064.71
## iteration 439. Log-lik: -933019.489
## iteration 440. Log-lik: -933037.847
## iteration 441. Log-lik: -932975.756
## iteration 442. Log-lik: -933018.03
## iteration 443. Log-lik: -933097.66
## iteration 444. Log-lik: -933035.708
## iteration 445. Log-lik: -932971.177
## iteration 446. Log-lik: -932899.48
## iteration 447. Log-lik: -932868.379
## iteration 448. Log-lik: -932795.691
## iteration 449. Log-lik: -932732.98
## iteration 450. Log-lik: -932704.581
## iteration 451. Log-lik: -932678.783
## iteration 452. Log-lik: -932740.753
## iteration 453. Log-lik: -932758.681
## iteration 454. Log-lik: -932813.96
## iteration 455. Log-lik: -932819.596
## iteration 456. Log-lik: -932834.851
## iteration 457. Log-lik: -932913.359
## iteration 458. Log-lik: -932831.03
## iteration 459. Log-lik: -932785.321
## iteration 460. Log-lik: -932790.425
## iteration 461. Log-lik: -932771.482
## iteration 462. Log-lik: -932830.631
## iteration 463. Log-lik: -932886.966
## iteration 464. Log-lik: -932844.211
## iteration 465. Log-lik: -932903.097
## iteration 466. Log-lik: -932867.096
## iteration 467. Log-lik: -932874.973
## iteration 468. Log-lik: -932927.457
## iteration 469. Log-lik: -932870.391
## iteration 470. Log-lik: -932800.618
## iteration 471. Log-lik: -932737.186
## iteration 472. Log-lik: -932731.646
## iteration 473. Log-lik: -932749.982
## iteration 474. Log-lik: -932757.007
## iteration 475. Log-lik: -932805.205
## iteration 476. Log-lik: -932808.634
## iteration 477. Log-lik: -932861.583
## iteration 478. Log-lik: -932765.869
## iteration 479. Log-lik: -932722.469
## iteration 480. Log-lik: -932716.772
## iteration 481. Log-lik: -932731.897
## iteration 482. Log-lik: -932792.494
## iteration 483. Log-lik: -932784.481
## iteration 484. Log-lik: -932833.25
## iteration 485. Log-lik: -932835.061
## iteration 486. Log-lik: -932830.43
## iteration 487. Log-lik: -932857.186
## iteration 488. Log-lik: -932852.695
## iteration 489. Log-lik: -932866.778
## iteration 490. Log-lik: -932895.323
## iteration 491. Log-lik: -932819.287
## iteration 492. Log-lik: -932852.511
## iteration 493. Log-lik: -932794.199
## iteration 494. Log-lik: -932813.037
## iteration 495. Log-lik: -932857.424
## iteration 496. Log-lik: -932868.064
## iteration 497. Log-lik: -932921.412
## iteration 498. Log-lik: -932881.499
## iteration 499. Log-lik: -932747.117
## iteration 500. Log-lik: -932636.243
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## iteration 1
## iteration 2. Log-lik: -1110695.639
## iteration 3. Log-lik: -1105603.931
## iteration 4. Log-lik: -1100033.529
## iteration 5. Log-lik: -1094504.331
## iteration 6. Log-lik: -1088926.724
## iteration 7. Log-lik: -1082530.656
## iteration 8. Log-lik: -1075277.768
## iteration 9. Log-lik: -1067626.03
## iteration 10. Log-lik: -1059991.28
## iteration 11. Log-lik: -1052510.237
## iteration 12. Log-lik: -1045287.379
## iteration 13. Log-lik: -1038262.2
## iteration 14. Log-lik: -1031801.629
## iteration 15. Log-lik: -1025800.383
## iteration 16. Log-lik: -1020495.804
## iteration 17. Log-lik: -1015419.654
## iteration 18. Log-lik: -1010610.846
## iteration 19. Log-lik: -1006008.981
## iteration 20. Log-lik: -1001709.453
## iteration 21. Log-lik: -997858.317
## iteration 22. Log-lik: -994137.925
## iteration 23. Log-lik: -990678.813
## iteration 24. Log-lik: -987349.157
## iteration 25. Log-lik: -984546.899
## iteration 26. Log-lik: -981840.632
## iteration 27. Log-lik: -979344.929
## iteration 28. Log-lik: -977042.486
## iteration 29. Log-lik: -974989.668
## iteration 30. Log-lik: -973105.309
## iteration 31. Log-lik: -971341.221
## iteration 32. Log-lik: -969694.138
## iteration 33. Log-lik: -968318.718
## iteration 34. Log-lik: -966933.721
## iteration 35. Log-lik: -965650.512
## iteration 36. Log-lik: -964442.209
## iteration 37. Log-lik: -963490.866
## iteration 38. Log-lik: -962510.289
## iteration 39. Log-lik: -961697.081
## iteration 40. Log-lik: -960902.891
## iteration 41. Log-lik: -960062.697
## iteration 42. Log-lik: -959337.75
## iteration 43. Log-lik: -958590.563
## iteration 44. Log-lik: -957945.525
## iteration 45. Log-lik: -957339.006
## iteration 46. Log-lik: -956735.273
## iteration 47. Log-lik: -956207.889
## iteration 48. Log-lik: -955624.384
## iteration 49. Log-lik: -955116.049
## iteration 50. Log-lik: -954595.414
## iteration 51. Log-lik: -954103.318
## iteration 52. Log-lik: -953655.811
## iteration 53. Log-lik: -953142.611
## iteration 54. Log-lik: -952706.874
## iteration 55. Log-lik: -952231.392
## iteration 56. Log-lik: -951799.333
## iteration 57. Log-lik: -951364.423
## iteration 58. Log-lik: -950944.408
## iteration 59. Log-lik: -950589.115
## iteration 60. Log-lik: -950185.636
## iteration 61. Log-lik: -949805.159
## iteration 62. Log-lik: -949484.534
## iteration 63. Log-lik: -949119.325
## iteration 64. Log-lik: -948650.009
## iteration 65. Log-lik: -948367.063
## iteration 66. Log-lik: -948033.673
## iteration 67. Log-lik: -947736.101
## iteration 68. Log-lik: -947428.987
## iteration 69. Log-lik: -947131.656
## iteration 70. Log-lik: -946835.732
## iteration 71. Log-lik: -946531.461
## iteration 72. Log-lik: -946309.808
## iteration 73. Log-lik: -946038.37
## iteration 74. Log-lik: -945829.66
## iteration 75. Log-lik: -945553.922
## iteration 76. Log-lik: -945293.304
## iteration 77. Log-lik: -945030.799
## iteration 78. Log-lik: -944787.137
## iteration 79. Log-lik: -944562.492
## iteration 80. Log-lik: -944334.07
## iteration 81. Log-lik: -944113.958
## iteration 82. Log-lik: -943939.77
## iteration 83. Log-lik: -943762.456
## iteration 84. Log-lik: -943586.605
## iteration 85. Log-lik: -943380.505
## iteration 86. Log-lik: -943145.393
## iteration 87. Log-lik: -942933.207
## iteration 88. Log-lik: -942756.025
## iteration 89. Log-lik: -942520.755
## iteration 90. Log-lik: -942328.45
## iteration 91. Log-lik: -942163.886
## iteration 92. Log-lik: -941990.556
## iteration 93. Log-lik: -941816.903
## iteration 94. Log-lik: -941647.954
## iteration 95. Log-lik: -941454.356
## iteration 96. Log-lik: -941315.501
## iteration 97. Log-lik: -941206.834
## iteration 98. Log-lik: -941087.195
## iteration 99. Log-lik: -940925.212
## iteration 100. Log-lik: -940805.76
## iteration 101. Log-lik: -940643.54
## iteration 102. Log-lik: -940520.177
## iteration 103. Log-lik: -940375.524
## iteration 104. Log-lik: -940266.145
## iteration 105. Log-lik: -940141.479
## iteration 106. Log-lik: -940049.211
## iteration 107. Log-lik: -939919.586
## iteration 108. Log-lik: -939773.95
## iteration 109. Log-lik: -939690.471
## iteration 110. Log-lik: -939628.534
## iteration 111. Log-lik: -939490.944
## iteration 112. Log-lik: -939346.625
## iteration 113. Log-lik: -939267.999
## iteration 114. Log-lik: -939124.914
## iteration 115. Log-lik: -938998.358
## iteration 116. Log-lik: -938883.43
## iteration 117. Log-lik: -938772.763
## iteration 118. Log-lik: -938723.475
## iteration 119. Log-lik: -938637.878
## iteration 120. Log-lik: -938581.814
## iteration 121. Log-lik: -938508.568
## iteration 122. Log-lik: -938458.322
## iteration 123. Log-lik: -938340.016
## iteration 124. Log-lik: -938294.021
## iteration 125. Log-lik: -938226.166
## iteration 126. Log-lik: -938091.555
## iteration 127. Log-lik: -938029.979
## iteration 128. Log-lik: -937921.368
## iteration 129. Log-lik: -937840.447
## iteration 130. Log-lik: -937752.591
## iteration 131. Log-lik: -937708.938
## iteration 132. Log-lik: -937644.639
## iteration 133. Log-lik: -937578.607
## iteration 134. Log-lik: -937508.478
## iteration 135. Log-lik: -937459.97
## iteration 136. Log-lik: -937390.458
## iteration 137. Log-lik: -937308.514
## iteration 138. Log-lik: -937194.311
## iteration 139. Log-lik: -937116.75
## iteration 140. Log-lik: -937050.777
## iteration 141. Log-lik: -936972.413
## iteration 142. Log-lik: -936918.384
## iteration 143. Log-lik: -936878.767
## iteration 144. Log-lik: -936864.368
## iteration 145. Log-lik: -936782.608
## iteration 146. Log-lik: -936707.561
## iteration 147. Log-lik: -936656.889
## iteration 148. Log-lik: -936598.404
## iteration 149. Log-lik: -936543.618
## iteration 150. Log-lik: -936477.489
## iteration 151. Log-lik: -936428.53
## iteration 152. Log-lik: -936376.138
## iteration 153. Log-lik: -936311.994
## iteration 154. Log-lik: -936282.555
## iteration 155. Log-lik: -936256.412
## iteration 156. Log-lik: -936208.595
## iteration 157. Log-lik: -936175.153
## iteration 158. Log-lik: -936127.906
## iteration 159. Log-lik: -936087.925
## iteration 160. Log-lik: -936039.919
## iteration 161. Log-lik: -935999.33
## iteration 162. Log-lik: -935940.066
## iteration 163. Log-lik: -935902.623
## iteration 164. Log-lik: -935841.609
## iteration 165. Log-lik: -935806.499
## iteration 166. Log-lik: -935774.915
## iteration 167. Log-lik: -935769.791
## iteration 168. Log-lik: -935743.867
## iteration 169. Log-lik: -935697.071
## iteration 170. Log-lik: -935670.625
## iteration 171. Log-lik: -935639.673
## iteration 172. Log-lik: -935615.299
## iteration 173. Log-lik: -935583.376
## iteration 174. Log-lik: -935566.739
## iteration 175. Log-lik: -935535.647
## iteration 176. Log-lik: -935498.075
## iteration 177. Log-lik: -935471.299
## iteration 178. Log-lik: -935439.147
## iteration 179. Log-lik: -935388.693
## iteration 180. Log-lik: -935351.785
## iteration 181. Log-lik: -935314.289
## iteration 182. Log-lik: -935300.772
## iteration 183. Log-lik: -935272.609
## iteration 184. Log-lik: -935245.367
## iteration 185. Log-lik: -935212.777
## iteration 186. Log-lik: -935184.392
## iteration 187. Log-lik: -935161.069
## iteration 188. Log-lik: -935133.091
## iteration 189. Log-lik: -935107.665
## iteration 190. Log-lik: -935080.348
## iteration 191. Log-lik: -935055.599
## iteration 192. Log-lik: -935033.426
## iteration 193. Log-lik: -935009.383
## iteration 194. Log-lik: -934988.465
## iteration 195. Log-lik: -934934.65
## iteration 196. Log-lik: -934923.792
## iteration 197. Log-lik: -934913.526
## iteration 198. Log-lik: -934879.623
## iteration 199. Log-lik: -934860.819
## iteration 200. Log-lik: -934852.284
## iteration 201. Log-lik: -934834.152
## iteration 202. Log-lik: -934798.588
## iteration 203. Log-lik: -934775.349
## iteration 204. Log-lik: -934744.005
## iteration 205. Log-lik: -934730.141
## iteration 206. Log-lik: -934723.398
## iteration 207. Log-lik: -934704.316
## iteration 208. Log-lik: -934684.59
## iteration 209. Log-lik: -934666.205
## iteration 210. Log-lik: -934638.087
## iteration 211. Log-lik: -934610.736
## iteration 212. Log-lik: -934593.52
## iteration 213. Log-lik: -934562.322
## iteration 214. Log-lik: -934559.55
## iteration 215. Log-lik: -934554.115
## iteration 216. Log-lik: -934508.357
## iteration 217. Log-lik: -934501.285
## iteration 218. Log-lik: -934492.989
## iteration 219. Log-lik: -934473.523
## iteration 220. Log-lik: -934453.687
## iteration 221. Log-lik: -934424.999
## iteration 222. Log-lik: -934416.617
## iteration 223. Log-lik: -934384.619
## iteration 224. Log-lik: -934372.89
## iteration 225. Log-lik: -934364.904
## iteration 226. Log-lik: -934350.697
## iteration 227. Log-lik: -934336.814
## iteration 228. Log-lik: -934319.595
## iteration 229. Log-lik: -934300.27
## iteration 230. Log-lik: -934288.063
## iteration 231. Log-lik: -934262.959
## iteration 232. Log-lik: -934228.183
## iteration 233. Log-lik: -934223.401
## iteration 234. Log-lik: -934209.276
## iteration 235. Log-lik: -934192.715
## iteration 236. Log-lik: -934174.644
## iteration 237. Log-lik: -934162.194
## iteration 238. Log-lik: -934145.117
## iteration 239. Log-lik: -934141.3
## iteration 240. Log-lik: -934133.257
## iteration 241. Log-lik: -934121.931
## iteration 242. Log-lik: -934121.389
## iteration 243. Log-lik: -934103.559
## iteration 244. Log-lik: -934084.157
## iteration 245. Log-lik: -934075.989
## iteration 246. Log-lik: -934049.977
## iteration 247. Log-lik: -934024.459
## iteration 248. Log-lik: -934004.518
## iteration 249. Log-lik: -933988.144
## iteration 250. Log-lik: -933977.883
## iteration 251. Log-lik: -933962.333
## iteration 252. Log-lik: -933946.361
## iteration 253. Log-lik: -933941.416
## iteration 254. Log-lik: -933928.266
## iteration 255. Log-lik: -933909.93
## iteration 256. Log-lik: -933886.439
## iteration 257. Log-lik: -933873.952
## iteration 258. Log-lik: -933863.837
## iteration 259. Log-lik: -933853.797
## iteration 260. Log-lik: -933845.906
## iteration 261. Log-lik: -933845.181
## iteration 262. Log-lik: -933833.626
## iteration 263. Log-lik: -933820.376
## iteration 264. Log-lik: -933799.132
## iteration 265. Log-lik: -933781.96
## iteration 266. Log-lik: -933775.875
## iteration 267. Log-lik: -933751.534
## iteration 268. Log-lik: -933746.459
## iteration 269. Log-lik: -933731.43
## iteration 270. Log-lik: -933718.855
## iteration 271. Log-lik: -933697.578
## iteration 272. Log-lik: -933687.901
## iteration 273. Log-lik: -933678.119
## iteration 274. Log-lik: -933673.27
## iteration 275. Log-lik: -933653.652
## iteration 276. Log-lik: -933640.924
## iteration 277. Log-lik: -933631.848
## iteration 278. Log-lik: -933626.231
## iteration 279. Log-lik: -933609.123
## iteration 280. Log-lik: -933603.542
## iteration 281. Log-lik: -933594.355
## iteration 282. Log-lik: -933593.28
## iteration 283. Log-lik: -933579.75
## iteration 284. Log-lik: -933569.898
## iteration 285. Log-lik: -933560.227
## iteration 286. Log-lik: -933542.739
## iteration 287. Log-lik: -933537.644
## iteration 288. Log-lik: -933532.467
## iteration 289. Log-lik: -933513.875
## iteration 290. Log-lik: -933501.059
## iteration 291. Log-lik: -933489.16
## iteration 292. Log-lik: -933487.345
## iteration 293. Log-lik: -933469.997
## iteration 294. Log-lik: -933463.185
## iteration 295. Log-lik: -933465.977
## iteration 296. Log-lik: -933456.745
## iteration 297. Log-lik: -933438.048
## iteration 298. Log-lik: -933427.579
## iteration 299. Log-lik: -933421.577
## iteration 300. Log-lik: -933416.144
## iteration 301. Log-lik: -933403.978
## iteration 302. Log-lik: -933398.582
## iteration 303. Log-lik: -933393.409
## iteration 304. Log-lik: -933388.117
## iteration 305. Log-lik: -933376.118
## iteration 306. Log-lik: -933374.071
## iteration 307. Log-lik: -933372.696
## iteration 308. Log-lik: -933363.287
## iteration 309. Log-lik: -933361.291
## iteration 310. Log-lik: -933347.86
## iteration 311. Log-lik: -933333.58
## iteration 312. Log-lik: -933311.75
## iteration 313. Log-lik: -933289.931
## iteration 314. Log-lik: -933284.253
## iteration 315. Log-lik: -933278.638
## iteration 316. Log-lik: -933276.487
## iteration 317. Log-lik: -933274.342
## iteration 318. Log-lik: -933269.151
## iteration 319. Log-lik: -933271.611
## iteration 320. Log-lik: -933265.5
## iteration 321. Log-lik: -933248.573
## iteration 322. Log-lik: -933236.557
## iteration 323. Log-lik: -933233.66
## iteration 324. Log-lik: -933226.821
## iteration 325. Log-lik: -933221.125
## iteration 326. Log-lik: -933203.668
## iteration 327. Log-lik: -933205.466
## iteration 328. Log-lik: -933199.97
## iteration 329. Log-lik: -933194.417
## iteration 330. Log-lik: -933180.452
## iteration 331. Log-lik: -933167.463
## iteration 332. Log-lik: -933156.602
## iteration 333. Log-lik: -933154.377
## iteration 334. Log-lik: -933124.915
## iteration 335. Log-lik: -933118.597
## iteration 336. Log-lik: -933116.844
## iteration 337. Log-lik: -933107.197
## iteration 338. Log-lik: -933097.124
## iteration 339. Log-lik: -933099.381
## iteration 340. Log-lik: -933081.183
## iteration 341. Log-lik: -933068.753
## iteration 342. Log-lik: -933062.262
## iteration 343. Log-lik: -933060.661
## iteration 344. Log-lik: -933054.675
## iteration 345. Log-lik: -933052.791
## iteration 346. Log-lik: -933051.034
## iteration 347. Log-lik: -933041.288
## iteration 348. Log-lik: -933026.291
## iteration 349. Log-lik: -933023.771
## iteration 350. Log-lik: -933009.251
## iteration 351. Log-lik: -933000.831
## iteration 352. Log-lik: -932987.899
## iteration 353. Log-lik: -932986.059
## iteration 354. Log-lik: -932980.224
## iteration 355. Log-lik: -932978.501
## iteration 356. Log-lik: -932972.06
## iteration 357. Log-lik: -932963.397
## iteration 358. Log-lik: -932961.477
## iteration 359. Log-lik: -932947.719
## iteration 360. Log-lik: -932925.124
## iteration 361. Log-lik: -932919.006
## iteration 362. Log-lik: -932917.18
## iteration 363. Log-lik: -932919.179
## iteration 364. Log-lik: -932917.158
## iteration 365. Log-lik: -932915.191
## iteration 366. Log-lik: -932909.238
## iteration 367. Log-lik: -932884.292
## iteration 368. Log-lik: -932877.96
## iteration 369. Log-lik: -932877.255
## iteration 370. Log-lik: -932863.051
## iteration 371. Log-lik: -932852.744
## iteration 372. Log-lik: -932846.009
## iteration 373. Log-lik: -932843.962
## iteration 374. Log-lik: -932829.771
## iteration 375. Log-lik: -932831.684
## iteration 376. Log-lik: -932825.607
## iteration 377. Log-lik: -932827.5
## iteration 378. Log-lik: -932809.381
## iteration 379. Log-lik: -932811.25
## iteration 380. Log-lik: -932804.803
## iteration 381. Log-lik: -932798.039
## iteration 382. Log-lik: -932795.872
## iteration 383. Log-lik: -932789.217
## iteration 384. Log-lik: -932769.953
## iteration 385. Log-lik: -932763.941
## iteration 386. Log-lik: -932753.992
## iteration 387. Log-lik: -932747.889
## iteration 388. Log-lik: -932745.46
## iteration 389. Log-lik: -932734.552
## iteration 390. Log-lik: -932728.359
## iteration 391. Log-lik: -932725.887
## iteration 392. Log-lik: -932723.562
## iteration 393. Log-lik: -932717.285
## iteration 394. Log-lik: -932715.016
## iteration 395. Log-lik: -932708.681
## iteration 396. Log-lik: -932698.225
## iteration 397. Log-lik: -932699.889
## iteration 398. Log-lik: -932701.55
## iteration 399. Log-lik: -932692.396
## iteration 400. Log-lik: -932689.419
## iteration 401. Log-lik: -932683.072
## iteration 402. Log-lik: -932680.753
## iteration 403. Log-lik: -932674.35
## iteration 404. Log-lik: -932675.995
## iteration 405. Log-lik: -932673.664
## iteration 406. Log-lik: -932671.341
## iteration 407. Log-lik: -932672.335
## iteration 408. Log-lik: -932669.934
## iteration 409. Log-lik: -932659.269
## iteration 410. Log-lik: -932648.936
## iteration 411. Log-lik: -932645.991
## iteration 412. Log-lik: -932641.621
## iteration 413. Log-lik: -932618.615
## iteration 414. Log-lik: -932612.172
## iteration 415. Log-lik: -932609.73
## iteration 416. Log-lik: -932599.03
## iteration 417. Log-lik: -932584.093
## iteration 418. Log-lik: -932581.637
## iteration 419. Log-lik: -932583.132
## iteration 420. Log-lik: -932568.314
## iteration 421. Log-lik: -932569.791
## iteration 422. Log-lik: -932571.266
## iteration 423. Log-lik: -932560.543
## iteration 424. Log-lik: -932558.022
## iteration 425. Log-lik: -932547.484
## iteration 426. Log-lik: -932548.922
## iteration 427. Log-lik: -932550.358
## iteration 428. Log-lik: -932539.264
## iteration 429. Log-lik: -932532.895
## iteration 430. Log-lik: -932534.309
## iteration 431. Log-lik: -932535.721
## iteration 432. Log-lik: -932533.084
## iteration 433. Log-lik: -932522.471
## iteration 434. Log-lik: -932519.255
## iteration 435. Log-lik: -932508.45
## iteration 436. Log-lik: -932496.522
## iteration 437. Log-lik: -932489.801
## iteration 438. Log-lik: -932491.135
## iteration 439. Log-lik: -932488.447
## iteration 440. Log-lik: -932485.774
## iteration 441. Log-lik: -932487.092
## iteration 442. Log-lik: -932480.323
## iteration 443. Log-lik: -932477.965
## iteration 444. Log-lik: -932472.493
## iteration 445. Log-lik: -932469.829
## iteration 446. Log-lik: -932467.09
## iteration 447. Log-lik: -932468.392
## iteration 448. Log-lik: -932457.682
## iteration 449. Log-lik: -932450.728
## iteration 450. Log-lik: -932452.004
## iteration 451. Log-lik: -932445.456
## iteration 452. Log-lik: -932442.729
## iteration 453. Log-lik: -932431.658
## iteration 454. Log-lik: -932432.911
## iteration 455. Log-lik: -932434.162
## iteration 456. Log-lik: -932427.291
## iteration 457. Log-lik: -932428.529
## iteration 458. Log-lik: -932421.831
## iteration 459. Log-lik: -932423.057
## iteration 460. Log-lik: -932424.282
## iteration 461. Log-lik: -932409.032
## iteration 462. Log-lik: -932406.213
## iteration 463. Log-lik: -932394.666
## iteration 464. Log-lik: -932395.869
## iteration 465. Log-lik: -932388.457
## iteration 466. Log-lik: -932381.709
## iteration 467. Log-lik: -932374.808
## iteration 468. Log-lik: -932375.984
## iteration 469. Log-lik: -932364.861
## iteration 470. Log-lik: -932362.034
## iteration 471. Log-lik: -932351.059
## iteration 472. Log-lik: -932339.939
## iteration 473. Log-lik: -932328.955
## iteration 474. Log-lik: -932322.109
## iteration 475. Log-lik: -932319.169
## iteration 476. Log-lik: -932320.273
## iteration 477. Log-lik: -932321.375
## iteration 478. Log-lik: -932318.378
## iteration 479. Log-lik: -932315.425
## iteration 480. Log-lik: -932316.515
## iteration 481. Log-lik: -932313.632
## iteration 482. Log-lik: -932314.716
## iteration 483. Log-lik: -932307.203
## iteration 484. Log-lik: -932304.301
## iteration 485. Log-lik: -932302.708
## iteration 486. Log-lik: -932303.798
## iteration 487. Log-lik: -932304.887
## iteration 488. Log-lik: -932305.974
## iteration 489. Log-lik: -932302.987
## iteration 490. Log-lik: -932300.081
## iteration 491. Log-lik: -932305.204
## iteration 492. Log-lik: -932306.28
## iteration 493. Log-lik: -932303.378
## iteration 494. Log-lik: -932300.459
## iteration 495. Log-lik: -932301.524
## iteration 496. Log-lik: -932298.569
## iteration 497. Log-lik: -932295.586
## iteration 498. Log-lik: -932292.576
## iteration 499. Log-lik: -932289.796
## iteration 500. Log-lik: -932290.844
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## iteration 1
## iteration 2. Log-lik: -1111606.261
## iteration 3. Log-lik: -1110354.966
## iteration 4. Log-lik: -1107859.023
## iteration 5. Log-lik: -1105978.149
## iteration 6. Log-lik: -1104528.051
## iteration 7. Log-lik: -1103306.797
## iteration 8. Log-lik: -1102328.583
## iteration 9. Log-lik: -1101402.488
## iteration 10. Log-lik: -1100630.992
## iteration 11. Log-lik: -1099835.433
## iteration 12. Log-lik: -1099236.026
## iteration 13. Log-lik: -1098717.09
## iteration 14. Log-lik: -1098314.849
## iteration 15. Log-lik: -1097994.955
## iteration 16. Log-lik: -1097779.011
## iteration 17. Log-lik: -1097615.426
## iteration 18. Log-lik: -1097469.25
## iteration 19. Log-lik: -1097380.903
## iteration 20. Log-lik: -1097335.85
## iteration 21. Log-lik: -1097276.596
## iteration 22. Log-lik: -1097209.868
## iteration 23. Log-lik: -1097177.965
## iteration 24. Log-lik: -1097131.226
## iteration 25. Log-lik: -1097125.027
## iteration 26. Log-lik: -1097102.552
## iteration 27. Log-lik: -1097072.561
## iteration 28. Log-lik: -1097052.768
## iteration 29. Log-lik: -1097038.596
## iteration 30. Log-lik: -1097036.784
## iteration 31. Log-lik: -1097034.436
## iteration 32. Log-lik: -1097020.59
## iteration 33. Log-lik: -1097016.961
## iteration 34. Log-lik: -1097006.778
## iteration 35. Log-lik: -1096998.492
## iteration 36. Log-lik: -1096994.863
## iteration 37. Log-lik: -1096995.785
## iteration 38. Log-lik: -1096984.583
## iteration 39. Log-lik: -1096980.665
## iteration 40. Log-lik: -1096981.319
## iteration 41. Log-lik: -1096972.943
## iteration 42. Log-lik: -1096973.422
## iteration 43. Log-lik: -1096971.166
## iteration 44. Log-lik: -1096971.554
## iteration 45. Log-lik: -1096971.901
## iteration 46. Log-lik: -1096972.213
## iteration 47. Log-lik: -1096972.495
## iteration 48. Log-lik: -1096972.75
## iteration 49. Log-lik: -1096972.982
## iteration 50. Log-lik: -1096973.194
## iteration 51. Log-lik: -1096968.545
## iteration 52. Log-lik: -1096968.713
## iteration 53. Log-lik: -1096968.868
## iteration 54. Log-lik: -1096969.01
## iteration 55. Log-lik: -1096969.14
## iteration 56. Log-lik: -1096964.378
## iteration 57. Log-lik: -1096964.486
## iteration 58. Log-lik: -1096964.586
## iteration 59. Log-lik: -1096959.691
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## iteration 1
## iteration 2. Log-lik: -1110696.232
## iteration 3. Log-lik: -1104970.222
## iteration 4. Log-lik: -1099559.38
## iteration 5. Log-lik: -1094495.333
## iteration 6. Log-lik: -1089546.408
## iteration 7. Log-lik: -1084062.717
## iteration 8. Log-lik: -1077524.316
## iteration 9. Log-lik: -1070584.989
## iteration 10. Log-lik: -1063937.784
## iteration 11. Log-lik: -1056975.341
## iteration 12. Log-lik: -1050382.408
## iteration 13. Log-lik: -1044292.157
## iteration 14. Log-lik: -1038892.203
## iteration 15. Log-lik: -1033684.082
## iteration 16. Log-lik: -1028748.767
## iteration 17. Log-lik: -1024323.96
## iteration 18. Log-lik: -1019534.704
## iteration 19. Log-lik: -1014943.108
## iteration 20. Log-lik: -1010555.187
## iteration 21. Log-lik: -1006377.348
## iteration 22. Log-lik: -1002220.814
## iteration 23. Log-lik: -998463.969
## iteration 24. Log-lik: -994606.921
## iteration 25. Log-lik: -991257.188
## iteration 26. Log-lik: -988333.659
## iteration 27. Log-lik: -985398.162
## iteration 28. Log-lik: -982746.45
## iteration 29. Log-lik: -980246.335
## iteration 30. Log-lik: -978043.855
## iteration 31. Log-lik: -976106.27
## iteration 32. Log-lik: -973986.506
## iteration 33. Log-lik: -972105.166
## iteration 34. Log-lik: -970539.415
## iteration 35. Log-lik: -968981.306
## iteration 36. Log-lik: -967576.749
## iteration 37. Log-lik: -966516.891
## iteration 38. Log-lik: -965407.962
## iteration 39. Log-lik: -964464.133
## iteration 40. Log-lik: -963637.676
## iteration 41. Log-lik: -962962.308
## iteration 42. Log-lik: -962117.667
## iteration 43. Log-lik: -961295.231
## iteration 44. Log-lik: -960608.341
## iteration 45. Log-lik: -960029.358
## iteration 46. Log-lik: -959262.922
## iteration 47. Log-lik: -958590.514
## iteration 48. Log-lik: -957990.619
## iteration 49. Log-lik: -957262.73
## iteration 50. Log-lik: -956496.619
## iteration 51. Log-lik: -955829.941
## iteration 52. Log-lik: -955183.745
## iteration 53. Log-lik: -954528.618
## iteration 54. Log-lik: -954088.86
## iteration 55. Log-lik: -953630.822
## iteration 56. Log-lik: -953225.172
## iteration 57. Log-lik: -952864.957
## iteration 58. Log-lik: -952480.146
## iteration 59. Log-lik: -952123.7
## iteration 60. Log-lik: -951640.237
## iteration 61. Log-lik: -951357.003
## iteration 62. Log-lik: -950972.375
## iteration 63. Log-lik: -950652.123
## iteration 64. Log-lik: -950247.192
## iteration 65. Log-lik: -949817.126
## iteration 66. Log-lik: -949420.455
## iteration 67. Log-lik: -949157.921
## iteration 68. Log-lik: -948756.964
## iteration 69. Log-lik: -948469.273
## iteration 70. Log-lik: -948170.433
## iteration 71. Log-lik: -947868.326
## iteration 72. Log-lik: -947554.925
## iteration 73. Log-lik: -947281.735
## iteration 74. Log-lik: -947027.001
## iteration 75. Log-lik: -946840.384
## iteration 76. Log-lik: -946577.97
## iteration 77. Log-lik: -946373.153
## iteration 78. Log-lik: -946191.607
## iteration 79. Log-lik: -945998.071
## iteration 80. Log-lik: -945752.087
## iteration 81. Log-lik: -945517.672
## iteration 82. Log-lik: -945263.664
## iteration 83. Log-lik: -945053.348
## iteration 84. Log-lik: -944893.423
## iteration 85. Log-lik: -944777.778
## iteration 86. Log-lik: -944601.41
## iteration 87. Log-lik: -944398.397
## iteration 88. Log-lik: -944232.7
## iteration 89. Log-lik: -943967.12
## iteration 90. Log-lik: -943719.929
## iteration 91. Log-lik: -943610.392
## iteration 92. Log-lik: -943380.87
## iteration 93. Log-lik: -943304.498
## iteration 94. Log-lik: -943150.223
## iteration 95. Log-lik: -942968.96
## iteration 96. Log-lik: -942785.201
## iteration 97. Log-lik: -942569.303
## iteration 98. Log-lik: -942363.462
## iteration 99. Log-lik: -942223.95
## iteration 100. Log-lik: -942128.701
## iteration 101. Log-lik: -942004.789
## iteration 102. Log-lik: -941894.157
## iteration 103. Log-lik: -941739.856
## iteration 104. Log-lik: -941580.26
## iteration 105. Log-lik: -941398.508
## iteration 106. Log-lik: -941262.35
## iteration 107. Log-lik: -941100.181
## iteration 108. Log-lik: -940939.04
## iteration 109. Log-lik: -940871.354
## iteration 110. Log-lik: -940777.349
## iteration 111. Log-lik: -940600.341
## iteration 112. Log-lik: -940545.784
## iteration 113. Log-lik: -940466.414
## iteration 114. Log-lik: -940281.465
## iteration 115. Log-lik: -940148.955
## iteration 116. Log-lik: -940035.016
## iteration 117. Log-lik: -939952.194
## iteration 118. Log-lik: -939877.9
## iteration 119. Log-lik: -939872.837
## iteration 120. Log-lik: -939882.224
## iteration 121. Log-lik: -939795.652
## iteration 122. Log-lik: -939670.743
## iteration 123. Log-lik: -939472.634
## iteration 124. Log-lik: -939356.406
## iteration 125. Log-lik: -939205.974
## iteration 126. Log-lik: -939075.375
## iteration 127. Log-lik: -939002.829
## iteration 128. Log-lik: -938957.953
## iteration 129. Log-lik: -938954.707
## iteration 130. Log-lik: -938882.439
## iteration 131. Log-lik: -938783.39
## iteration 132. Log-lik: -938677.21
## iteration 133. Log-lik: -938529.5
## iteration 134. Log-lik: -938469.731
## iteration 135. Log-lik: -938433.212
## iteration 136. Log-lik: -938399.464
## iteration 137. Log-lik: -938344.512
## iteration 138. Log-lik: -938275.151
## iteration 139. Log-lik: -938229.737
## iteration 140. Log-lik: -938159.582
## iteration 141. Log-lik: -938161.521
## iteration 142. Log-lik: -938178.173
## iteration 143. Log-lik: -938100.918
## iteration 144. Log-lik: -937961.113
## iteration 145. Log-lik: -937875.799
## iteration 146. Log-lik: -937847.524
## iteration 147. Log-lik: -937813.139
## iteration 148. Log-lik: -937731.463
## iteration 149. Log-lik: -937745.636
## iteration 150. Log-lik: -937756.069
## iteration 151. Log-lik: -937653.268
## iteration 152. Log-lik: -937558.918
## iteration 153. Log-lik: -937476.38
## iteration 154. Log-lik: -937335.382
## iteration 155. Log-lik: -937150.988
## iteration 156. Log-lik: -937051.498
## iteration 157. Log-lik: -936994.63
## iteration 158. Log-lik: -936908.321
## iteration 159. Log-lik: -936807.9
## iteration 160. Log-lik: -936764.241
## iteration 161. Log-lik: -936677.685
## iteration 162. Log-lik: -936629.21
## iteration 163. Log-lik: -936498.339
## iteration 164. Log-lik: -936432.126
## iteration 165. Log-lik: -936366.815
## iteration 166. Log-lik: -936324.682
## iteration 167. Log-lik: -936236.376
## iteration 168. Log-lik: -936229
## iteration 169. Log-lik: -936267.006
## iteration 170. Log-lik: -936250.254
## iteration 171. Log-lik: -936235.802
## iteration 172. Log-lik: -936248.091
## iteration 173. Log-lik: -936144.248
## iteration 174. Log-lik: -936055.124
## iteration 175. Log-lik: -936002.182
## iteration 176. Log-lik: -935948.808
## iteration 177. Log-lik: -935984.497
## iteration 178. Log-lik: -935946.13
## iteration 179. Log-lik: -935958.62
## iteration 180. Log-lik: -935896.944
## iteration 181. Log-lik: -935837.796
## iteration 182. Log-lik: -935884.714
## iteration 183. Log-lik: -935772.655
## iteration 184. Log-lik: -935606.463
## iteration 185. Log-lik: -935551.928
## iteration 186. Log-lik: -935536.333
## iteration 187. Log-lik: -935568.924
## iteration 188. Log-lik: -935575.079
## iteration 189. Log-lik: -935488.726
## iteration 190. Log-lik: -935446.792
## iteration 191. Log-lik: -935390.558
## iteration 192. Log-lik: -935455.608
## iteration 193. Log-lik: -935411.209
## iteration 194. Log-lik: -935359.577
## iteration 195. Log-lik: -935325.422
## iteration 196. Log-lik: -935381.623
## iteration 197. Log-lik: -935273.191
## iteration 198. Log-lik: -935221.622
## iteration 199. Log-lik: -935216.519
## iteration 200. Log-lik: -935284.976
## iteration 201. Log-lik: -935276.69
## iteration 202. Log-lik: -935264.879
## iteration 203. Log-lik: -935197.556
## iteration 204. Log-lik: -935146.72
## iteration 205. Log-lik: -935073.643
## iteration 206. Log-lik: -935060.285
## iteration 207. Log-lik: -935051.856
## iteration 208. Log-lik: -935080.195
## iteration 209. Log-lik: -935100.015
## iteration 210. Log-lik: -935086.24
## iteration 211. Log-lik: -935085.319
## iteration 212. Log-lik: -934998.918
## iteration 213. Log-lik: -934932.975
## iteration 214. Log-lik: -934831.275
## iteration 215. Log-lik: -934816.771
## iteration 216. Log-lik: -934801.35
## iteration 217. Log-lik: -934903.222
## iteration 218. Log-lik: -935047.343
## iteration 219. Log-lik: -934967.34
## iteration 220. Log-lik: -934941.299
## iteration 221. Log-lik: -934896.45
## iteration 222. Log-lik: -934945.049
## iteration 223. Log-lik: -934888.31
## iteration 224. Log-lik: -934855.544
## iteration 225. Log-lik: -934880.275
## iteration 226. Log-lik: -934885.431
## iteration 227. Log-lik: -934852.269
## iteration 228. Log-lik: -934854.152
## iteration 229. Log-lik: -934784.225
## iteration 230. Log-lik: -934789.163
## iteration 231. Log-lik: -934752.795
## iteration 232. Log-lik: -934732.788
## iteration 233. Log-lik: -934698.5
## iteration 234. Log-lik: -934638.482
## iteration 235. Log-lik: -934569.355
## iteration 236. Log-lik: -934534.614
## iteration 237. Log-lik: -934539.544
## iteration 238. Log-lik: -934551.999
## iteration 239. Log-lik: -934595.59
## iteration 240. Log-lik: -934546.736
## iteration 241. Log-lik: -934582.018
## iteration 242. Log-lik: -934513.839
## iteration 243. Log-lik: -934472.998
## iteration 244. Log-lik: -934428.109
## iteration 245. Log-lik: -934409.802
## iteration 246. Log-lik: -934428.265
## iteration 247. Log-lik: -934478.282
## iteration 248. Log-lik: -934507.594
## iteration 249. Log-lik: -934408.764
## iteration 250. Log-lik: -934444.549
## iteration 251. Log-lik: -934356.17
## iteration 252. Log-lik: -934317.706
## iteration 253. Log-lik: -934275.748
## iteration 254. Log-lik: -934201.594
## iteration 255. Log-lik: -934184.215
## iteration 256. Log-lik: -934073.056
## iteration 257. Log-lik: -934064.24
## iteration 258. Log-lik: -934058.962
## iteration 259. Log-lik: -934102.014
## iteration 260. Log-lik: -934167.117
## iteration 261. Log-lik: -934187.449
## iteration 262. Log-lik: -934265.179
## iteration 263. Log-lik: -934273.807
## iteration 264. Log-lik: -934116.478
## iteration 265. Log-lik: -934039.336
## iteration 266. Log-lik: -934051.966
## iteration 267. Log-lik: -934060.493
## iteration 268. Log-lik: -934097.367
## iteration 269. Log-lik: -934153.573
## iteration 270. Log-lik: -934161.976
## iteration 271. Log-lik: -934143.046
## iteration 272. Log-lik: -934165.889
## iteration 273. Log-lik: -934185.393
## iteration 274. Log-lik: -934085.1
## iteration 275. Log-lik: -934031.017
## iteration 276. Log-lik: -934068.894
## iteration 277. Log-lik: -934075.898
## iteration 278. Log-lik: -934044.266
## iteration 279. Log-lik: -933994.619
## iteration 280. Log-lik: -934076.733
## iteration 281. Log-lik: -934020.429
## iteration 282. Log-lik: -933964.346
## iteration 283. Log-lik: -933952.099
## iteration 284. Log-lik: -933951.53
## iteration 285. Log-lik: -933907.475
## iteration 286. Log-lik: -933880.515
## iteration 287. Log-lik: -933907.475
## iteration 288. Log-lik: -933887.788
## iteration 289. Log-lik: -933911.894
## iteration 290. Log-lik: -933920.027
## iteration 291. Log-lik: -933882.132
## iteration 292. Log-lik: -933923.999
## iteration 293. Log-lik: -933899.373
## iteration 294. Log-lik: -933943.549
## iteration 295. Log-lik: -933868.477
## iteration 296. Log-lik: -933849.171
## iteration 297. Log-lik: -933846.736
## iteration 298. Log-lik: -933864.237
## iteration 299. Log-lik: -933894.189
## iteration 300. Log-lik: -933944.171
## iteration 301. Log-lik: -933978.788
## iteration 302. Log-lik: -933839.818
## iteration 303. Log-lik: -933843.363
## iteration 304. Log-lik: -933788.778
## iteration 305. Log-lik: -933760.535
## iteration 306. Log-lik: -933726.127
## iteration 307. Log-lik: -933817.421
## iteration 308. Log-lik: -933788.129
## iteration 309. Log-lik: -933804.427
## iteration 310. Log-lik: -933783.502
## iteration 311. Log-lik: -933827.439
## iteration 312. Log-lik: -933741.285
## iteration 313. Log-lik: -933776.093
## iteration 314. Log-lik: -933750.808
## iteration 315. Log-lik: -933783.735
## iteration 316. Log-lik: -933709.855
## iteration 317. Log-lik: -933690.162
## iteration 318. Log-lik: -933708.629
## iteration 319. Log-lik: -933681.538
## iteration 320. Log-lik: -933703.399
## iteration 321. Log-lik: -933709.263
## iteration 322. Log-lik: -933731.817
## iteration 323. Log-lik: -933664.928
## iteration 324. Log-lik: -933629.743
## iteration 325. Log-lik: -933677.546
## iteration 326. Log-lik: -933691.095
## iteration 327. Log-lik: -933611.887
## iteration 328. Log-lik: -933596.193
## iteration 329. Log-lik: -933547.946
## iteration 330. Log-lik: -933554.149
## iteration 331. Log-lik: -933556.323
## iteration 332. Log-lik: -933536.171
## iteration 333. Log-lik: -933465.282
## iteration 334. Log-lik: -933457.636
## iteration 335. Log-lik: -933486.926
## iteration 336. Log-lik: -933526.644
## iteration 337. Log-lik: -933516.184
## iteration 338. Log-lik: -933502.848
## iteration 339. Log-lik: -933471.208
## iteration 340. Log-lik: -933468.192
## iteration 341. Log-lik: -933450.493
## iteration 342. Log-lik: -933446.484
## iteration 343. Log-lik: -933421.16
## iteration 344. Log-lik: -933469.214
## iteration 345. Log-lik: -933462.611
## iteration 346. Log-lik: -933474.586
## iteration 347. Log-lik: -933472.64
## iteration 348. Log-lik: -933437.897
## iteration 349. Log-lik: -933452.643
## iteration 350. Log-lik: -933444.904
## iteration 351. Log-lik: -933472.383
## iteration 352. Log-lik: -933418.099
## iteration 353. Log-lik: -933319.845
## iteration 354. Log-lik: -933303.174
## iteration 355. Log-lik: -933261.905
## iteration 356. Log-lik: -933246.59
## iteration 357. Log-lik: -933269.486
## iteration 358. Log-lik: -933327.107
## iteration 359. Log-lik: -933374.367
## iteration 360. Log-lik: -933395.978
## iteration 361. Log-lik: -933279.798
## iteration 362. Log-lik: -933176.949
## iteration 363. Log-lik: -933177.101
## iteration 364. Log-lik: -933134.585
## iteration 365. Log-lik: -933104.925
## iteration 366. Log-lik: -933145.368
## iteration 367. Log-lik: -933123.247
## iteration 368. Log-lik: -933133.311
## iteration 369. Log-lik: -933150.149
## iteration 370. Log-lik: -933123.621
## iteration 371. Log-lik: -933119.557
## iteration 372. Log-lik: -933096.657
## iteration 373. Log-lik: -933118.352
## iteration 374. Log-lik: -933192.214
## iteration 375. Log-lik: -933194.263
## iteration 376. Log-lik: -933146.624
## iteration 377. Log-lik: -933183.452
## iteration 378. Log-lik: -933140.446
## iteration 379. Log-lik: -933120.98
## iteration 380. Log-lik: -933035.907
## iteration 381. Log-lik: -933052.469
## iteration 382. Log-lik: -933074.086
## iteration 383. Log-lik: -933058.294
## iteration 384. Log-lik: -933151.126
## iteration 385. Log-lik: -933124.606
## iteration 386. Log-lik: -933041.889
## iteration 387. Log-lik: -933048.921
## iteration 388. Log-lik: -933035.106
## iteration 389. Log-lik: -933071.015
## iteration 390. Log-lik: -933027.019
## iteration 391. Log-lik: -933060.741
## iteration 392. Log-lik: -933085.464
## iteration 393. Log-lik: -933153.574
## iteration 394. Log-lik: -933043.047
## iteration 395. Log-lik: -933038.108
## iteration 396. Log-lik: -933010.344
## iteration 397. Log-lik: -933033.311
## iteration 398. Log-lik: -933116.522
## iteration 399. Log-lik: -933117.067
## iteration 400. Log-lik: -933150.963
## iteration 401. Log-lik: -933240.061
## iteration 402. Log-lik: -933142.675
## iteration 403. Log-lik: -933076.879
## iteration 404. Log-lik: -933045.965
## iteration 405. Log-lik: -933021.017
## iteration 406. Log-lik: -932964.798
## iteration 407. Log-lik: -932996.187
## iteration 408. Log-lik: -933022.458
## iteration 409. Log-lik: -933060.837
## iteration 410. Log-lik: -933066.606
## iteration 411. Log-lik: -933060.719
## iteration 412. Log-lik: -933001.648
## iteration 413. Log-lik: -932982.799
## iteration 414. Log-lik: -932955.413
## iteration 415. Log-lik: -932998.391
## iteration 416. Log-lik: -932969.997
## iteration 417. Log-lik: -932940.153
## iteration 418. Log-lik: -932977.667
## iteration 419. Log-lik: -932931.572
## iteration 420. Log-lik: -932923.312
## iteration 421. Log-lik: -932951.485
## iteration 422. Log-lik: -932976.126
## iteration 423. Log-lik: -933025.963
## iteration 424. Log-lik: -933018.491
## iteration 425. Log-lik: -933010.775
## iteration 426. Log-lik: -933026.471
## iteration 427. Log-lik: -932934.794
## iteration 428. Log-lik: -932810.083
## iteration 429. Log-lik: -932807.788
## iteration 430. Log-lik: -932876.692
## iteration 431. Log-lik: -932878.676
## iteration 432. Log-lik: -932951.457
## iteration 433. Log-lik: -932918.434
## iteration 434. Log-lik: -932975.949
## iteration 435. Log-lik: -932951.867
## iteration 436. Log-lik: -932917.784
## iteration 437. Log-lik: -932935.654
## iteration 438. Log-lik: -932906.469
## iteration 439. Log-lik: -932850.316
## iteration 440. Log-lik: -932896.574
## iteration 441. Log-lik: -932870.815
## iteration 442. Log-lik: -932876.316
## iteration 443. Log-lik: -932875.793
## iteration 444. Log-lik: -932913.729
## iteration 445. Log-lik: -933016.13
## iteration 446. Log-lik: -932994.037
## iteration 447. Log-lik: -932941.29
## iteration 448. Log-lik: -932983.737
## iteration 449. Log-lik: -932968.157
## iteration 450. Log-lik: -932924.666
## iteration 451. Log-lik: -932915.494
## iteration 452. Log-lik: -932905.155
## iteration 453. Log-lik: -932969.888
## iteration 454. Log-lik: -932882.323
## iteration 455. Log-lik: -932829.925
## iteration 456. Log-lik: -932840.468
## iteration 457. Log-lik: -932743.578
## iteration 458. Log-lik: -932731.834
## iteration 459. Log-lik: -932733.58
## iteration 460. Log-lik: -932770.337
## iteration 461. Log-lik: -932834.398
## iteration 462. Log-lik: -932850.869
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## iteration 1
## iteration 2. Log-lik: -1093703.403
## iteration 3. Log-lik: -1093945.691
## iteration 4. Log-lik: -1051698.426
## iteration 5. Log-lik: -1026796.075
## iteration 6. Log-lik: -1016479.855
## iteration 7. Log-lik: -1010365.648
## iteration 8. Log-lik: -1004576.708
## iteration 9. Log-lik: -998331.867
## iteration 10. Log-lik: -991963.946
## iteration 11. Log-lik: -985743.394
## iteration 12. Log-lik: -980107.898
## iteration 13. Log-lik: -974620.488
## iteration 14. Log-lik: -969787.767
## iteration 15. Log-lik: -965541.101
## iteration 16. Log-lik: -961884.383
## iteration 17. Log-lik: -958659.517
## iteration 18. Log-lik: -955642.541
## iteration 19. Log-lik: -952817.199
## iteration 20. Log-lik: -950379.547
## iteration 21. Log-lik: -947883.909
## iteration 22. Log-lik: -945681.349
## iteration 23. Log-lik: -943581.387
## iteration 24. Log-lik: -941425.753
## iteration 25. Log-lik: -939466.289
## iteration 26. Log-lik: -937576.872
## iteration 27. Log-lik: -935874.303
## iteration 28. Log-lik: -934198.006
## iteration 29. Log-lik: -932824.176
## iteration 30. Log-lik: -931322.794
## iteration 31. Log-lik: -930049.273
## iteration 32. Log-lik: -928866.173
## iteration 33. Log-lik: -927603.813
## iteration 34. Log-lik: -926603.186
## iteration 35. Log-lik: -925518.378
## iteration 36. Log-lik: -924543.651
## iteration 37. Log-lik: -923641.873
## iteration 38. Log-lik: -922796.705
## iteration 39. Log-lik: -921873.759
## iteration 40. Log-lik: -921060.247
## iteration 41. Log-lik: -920311.555
## iteration 42. Log-lik: -919538.256
## iteration 43. Log-lik: -918816.029
## iteration 44. Log-lik: -918086.876
## iteration 45. Log-lik: -917348.676
## iteration 46. Log-lik: -916685.003
## iteration 47. Log-lik: -916088.718
## iteration 48. Log-lik: -915433.52
## iteration 49. Log-lik: -914836.959
## iteration 50. Log-lik: -914282.034
## iteration 51. Log-lik: -913794.595
## iteration 52. Log-lik: -913248.698
## iteration 53. Log-lik: -912771.785
## iteration 54. Log-lik: -912325.033
## iteration 55. Log-lik: -911837.081
## iteration 56. Log-lik: -911415.996
## iteration 57. Log-lik: -911041.637
## iteration 58. Log-lik: -910630.882
## iteration 59. Log-lik: -910259.807
## iteration 60. Log-lik: -909830.572
## iteration 61. Log-lik: -909466.199
## iteration 62. Log-lik: -909118.632
## iteration 63. Log-lik: -908779.586
## iteration 64. Log-lik: -908437.439
## iteration 65. Log-lik: -908056.349
## iteration 66. Log-lik: -907761.321
## iteration 67. Log-lik: -907505.59
## iteration 68. Log-lik: -907258.324
## iteration 69. Log-lik: -906992.932
## iteration 70. Log-lik: -906717.214
## iteration 71. Log-lik: -906501.187
## iteration 72. Log-lik: -906207.799
## iteration 73. Log-lik: -905933.511
## iteration 74. Log-lik: -905732.875
## iteration 75. Log-lik: -905599.186
## iteration 76. Log-lik: -905330.779
## iteration 77. Log-lik: -905127.721
## iteration 78. Log-lik: -904907.248
## iteration 79. Log-lik: -904730.513
## iteration 80. Log-lik: -904550.056
## iteration 81. Log-lik: -904302.443
## iteration 82. Log-lik: -904129.42
## iteration 83. Log-lik: -904023.48
## iteration 84. Log-lik: -903876.441
## iteration 85. Log-lik: -903732.437
## iteration 86. Log-lik: -903584.736
## iteration 87. Log-lik: -903423.159
## iteration 88. Log-lik: -903278.867
## iteration 89. Log-lik: -903111.157
## iteration 90. Log-lik: -902959.733
## iteration 91. Log-lik: -902756.392
## iteration 92. Log-lik: -902634.291
## iteration 93. Log-lik: -902525.689
## iteration 94. Log-lik: -902418.431
## iteration 95. Log-lik: -902249.725
## iteration 96. Log-lik: -902132.75
## iteration 97. Log-lik: -902006.734
## iteration 98. Log-lik: -901877.755
## iteration 99. Log-lik: -901778.751
## iteration 100. Log-lik: -901640.614
## iteration 101. Log-lik: -901532.928
## iteration 102. Log-lik: -901437.709
## iteration 103. Log-lik: -901333.198
## iteration 104. Log-lik: -901209.21
## iteration 105. Log-lik: -901099.305
## iteration 106. Log-lik: -900973.113
## iteration 107. Log-lik: -900884.462
## iteration 108. Log-lik: -900804.137
## iteration 109. Log-lik: -900705.566
## iteration 110. Log-lik: -900610.332
## iteration 111. Log-lik: -900503.12
## iteration 112. Log-lik: -900437.986
## iteration 113. Log-lik: -900339.635
## iteration 114. Log-lik: -900270.769
## iteration 115. Log-lik: -900186.983
## iteration 116. Log-lik: -900150.94
## iteration 117. Log-lik: -900054.252
## iteration 118. Log-lik: -900014.42
## iteration 119. Log-lik: -899938.925
## iteration 120. Log-lik: -899858.768
## iteration 121. Log-lik: -899793.166
## iteration 122. Log-lik: -899726.762
## iteration 123. Log-lik: -899665.851
## iteration 124. Log-lik: -899601.168
## iteration 125. Log-lik: -899534.002
## iteration 126. Log-lik: -899480.76
## iteration 127. Log-lik: -899432.061
## iteration 128. Log-lik: -899339.97
## iteration 129. Log-lik: -899292.504
## iteration 130. Log-lik: -899263.414
## iteration 131. Log-lik: -899204.185
## iteration 132. Log-lik: -899144.318
## iteration 133. Log-lik: -899088.891
## iteration 134. Log-lik: -899024.012
## iteration 135. Log-lik: -898958.8
## iteration 136. Log-lik: -898903.335
## iteration 137. Log-lik: -898872.238
## iteration 138. Log-lik: -898838.837
## iteration 139. Log-lik: -898799.111
## iteration 140. Log-lik: -898729.908
## iteration 141. Log-lik: -898673.314
## iteration 142. Log-lik: -898615.536
## iteration 143. Log-lik: -898602.294
## iteration 144. Log-lik: -898553.406
## iteration 145. Log-lik: -898528.209
## iteration 146. Log-lik: -898479.004
## iteration 147. Log-lik: -898425.488
## iteration 148. Log-lik: -898378.093
## iteration 149. Log-lik: -898335.97
## iteration 150. Log-lik: -898297.456
## iteration 151. Log-lik: -898267.344
## iteration 152. Log-lik: -898224.035
## iteration 153. Log-lik: -898174.197
## iteration 154. Log-lik: -898150.216
## iteration 155. Log-lik: -898100.553
## iteration 156. Log-lik: -898071.604
## iteration 157. Log-lik: -898014.257
## iteration 158. Log-lik: -897989.42
## iteration 159. Log-lik: -897973.85
## iteration 160. Log-lik: -897952.938
## iteration 161. Log-lik: -897921.914
## iteration 162. Log-lik: -897881.7
## iteration 163. Log-lik: -897859.458
## iteration 164. Log-lik: -897821.023
## iteration 165. Log-lik: -897764.677
## iteration 166. Log-lik: -897745.694
## iteration 167. Log-lik: -897700.077
## iteration 168. Log-lik: -897656.33
## iteration 169. Log-lik: -897637.975
## iteration 170. Log-lik: -897609.713
## iteration 171. Log-lik: -897562.036
## iteration 172. Log-lik: -897541.084
## iteration 173. Log-lik: -897528.411
## iteration 174. Log-lik: -897507.875
## iteration 175. Log-lik: -897467.607
## iteration 176. Log-lik: -897438.555
## iteration 177. Log-lik: -897419.685
## iteration 178. Log-lik: -897390.743
## iteration 179. Log-lik: -897359.19
## iteration 180. Log-lik: -897320.96
## iteration 181. Log-lik: -897292.054
## iteration 182. Log-lik: -897286.783
## iteration 183. Log-lik: -897246.789
## iteration 184. Log-lik: -897221.931
## iteration 185. Log-lik: -897165.956
## iteration 186. Log-lik: -897144.8
## iteration 187. Log-lik: -897125.269
## iteration 188. Log-lik: -897100.937
## iteration 189. Log-lik: -897066.156
## iteration 190. Log-lik: -897014.38
## iteration 191. Log-lik: -896994.431
## iteration 192. Log-lik: -896988.137
## iteration 193. Log-lik: -896962.547
## iteration 194. Log-lik: -896946.065
## iteration 195. Log-lik: -896904.442
## iteration 196. Log-lik: -896882.853
## iteration 197. Log-lik: -896864.162
## iteration 198. Log-lik: -896814.538
## iteration 199. Log-lik: -896797.325
## iteration 200. Log-lik: -896775.637
## iteration 201. Log-lik: -896754.141
## iteration 202. Log-lik: -896737.227
## iteration 203. Log-lik: -896705.053
## iteration 204. Log-lik: -896690.813
## iteration 205. Log-lik: -896656.357
## iteration 206. Log-lik: -896629.513
## iteration 207. Log-lik: -896603.417
## iteration 208. Log-lik: -896585.028
## iteration 209. Log-lik: -896551.317
## iteration 210. Log-lik: -896546.897
## iteration 211. Log-lik: -896521.716
## iteration 212. Log-lik: -896489.951
## iteration 213. Log-lik: -896475.629
## iteration 214. Log-lik: -896473.209
## iteration 215. Log-lik: -896454.362
## iteration 216. Log-lik: -896431.593
## iteration 217. Log-lik: -896408.641
## iteration 218. Log-lik: -896373.359
## iteration 219. Log-lik: -896355.425
## iteration 220. Log-lik: -896332.743
## iteration 221. Log-lik: -896308.889
## iteration 222. Log-lik: -896283.38
## iteration 223. Log-lik: -896256.015
## iteration 224. Log-lik: -896228.504
## iteration 225. Log-lik: -896226.222
## iteration 226. Log-lik: -896222.481
## iteration 227. Log-lik: -896190.946
## iteration 228. Log-lik: -896176.085
## iteration 229. Log-lik: -896153.605
## iteration 230. Log-lik: -896146.78
## iteration 231. Log-lik: -896136.323
## iteration 232. Log-lik: -896109.442
## iteration 233. Log-lik: -896091.62
## iteration 234. Log-lik: -896078.094
## iteration 235. Log-lik: -896074.935
## iteration 236. Log-lik: -896064.344
## iteration 237. Log-lik: -896044.848
## iteration 238. Log-lik: -896035.455
## iteration 239. Log-lik: -896004.229
## iteration 240. Log-lik: -895991.472
## iteration 241. Log-lik: -895968.72
## iteration 242. Log-lik: -895953.495
## iteration 243. Log-lik: -895930.272
## iteration 244. Log-lik: -895907.462
## iteration 245. Log-lik: -895883.918
## iteration 246. Log-lik: -895872.572
## iteration 247. Log-lik: -895841.154
## iteration 248. Log-lik: -895817.887
## iteration 249. Log-lik: -895803.112
## iteration 250. Log-lik: -895785.029
## iteration 251. Log-lik: -895777.691
## iteration 252. Log-lik: -895766.024
## iteration 253. Log-lik: -895745.215
## iteration 254. Log-lik: -895723.533
## iteration 255. Log-lik: -895716.188
## iteration 256. Log-lik: -895694.173
## iteration 257. Log-lik: -895673.506
## iteration 258. Log-lik: -895649.89
## iteration 259. Log-lik: -895642.45
## iteration 260. Log-lik: -895631.053
## iteration 261. Log-lik: -895627.765
## iteration 262. Log-lik: -895607.495
## iteration 263. Log-lik: -895600.448
## iteration 264. Log-lik: -895592.34
## iteration 265. Log-lik: -895592.861
## iteration 266. Log-lik: -895557.753
## iteration 267. Log-lik: -895546.294
## iteration 268. Log-lik: -895534.66
## iteration 269. Log-lik: -895518.832
## iteration 270. Log-lik: -895506.576
## iteration 271. Log-lik: -895490.331
## iteration 272. Log-lik: -895465.14
## iteration 273. Log-lik: -895461.33
## iteration 274. Log-lik: -895447.412
## iteration 275. Log-lik: -895435.01
## iteration 276. Log-lik: -895431.08
## iteration 277. Log-lik: -895416.694
## iteration 278. Log-lik: -895388.539
## iteration 279. Log-lik: -895379.866
## iteration 280. Log-lik: -895368.396
## iteration 281. Log-lik: -895350.676
## iteration 282. Log-lik: -895325.332
## iteration 283. Log-lik: -895320.457
## iteration 284. Log-lik: -895316.109
## iteration 285. Log-lik: -895307.205
## iteration 286. Log-lik: -895293.877
## iteration 287. Log-lik: -895281.462
## iteration 288. Log-lik: -895264.92
## iteration 289. Log-lik: -895267.874
## iteration 290. Log-lik: -895254.703
## iteration 291. Log-lik: -895241.368
## iteration 292. Log-lik: -895228.63
## iteration 293. Log-lik: -895210.79
## iteration 294. Log-lik: -895201.746
## iteration 295. Log-lik: -895189.375
## iteration 296. Log-lik: -895164.31
## iteration 297. Log-lik: -895159.626
## iteration 298. Log-lik: -895154.468
## iteration 299. Log-lik: -895152.931
## iteration 300. Log-lik: -895139.787
## iteration 301. Log-lik: -895122.188
## iteration 302. Log-lik: -895100.099
## iteration 303. Log-lik: -895090.67
## iteration 304. Log-lik: -895077.153
## iteration 305. Log-lik: -895076.274
## iteration 306. Log-lik: -895045.324
## iteration 307. Log-lik: -895015.817
## iteration 308. Log-lik: -895006.891
## iteration 309. Log-lik: -895005.671
## iteration 310. Log-lik: -894984.453
## iteration 311. Log-lik: -894967.355
## iteration 312. Log-lik: -894957.191
## iteration 313. Log-lik: -894931.878
## iteration 314. Log-lik: -894918.45
## iteration 315. Log-lik: -894901.912
## iteration 316. Log-lik: -894888.585
## iteration 317. Log-lik: -894879.412
## iteration 318. Log-lik: -894882.191
## iteration 319. Log-lik: -894869.967
## iteration 320. Log-lik: -894860.72
## iteration 321. Log-lik: -894859.247
## iteration 322. Log-lik: -894848.119
## iteration 323. Log-lik: -894846.91
## iteration 324. Log-lik: -894849.673
## iteration 325. Log-lik: -894840.321
## iteration 326. Log-lik: -894843.057
## iteration 327. Log-lik: -894824.241
## iteration 328. Log-lik: -894815.134
## iteration 329. Log-lik: -894813.791
## iteration 330. Log-lik: -894808.245
## iteration 331. Log-lik: -894798.815
## iteration 332. Log-lik: -894797.404
## iteration 333. Log-lik: -894796.028
## iteration 334. Log-lik: -894778.911
## iteration 335. Log-lik: -894773.556
## iteration 336. Log-lik: -894764.188
## iteration 337. Log-lik: -894745.937
## iteration 338. Log-lik: -894736.11
## iteration 339. Log-lik: -894708.422
## iteration 340. Log-lik: -894702.181
## iteration 341. Log-lik: -894700.638
## iteration 342. Log-lik: -894699.14
## iteration 343. Log-lik: -894693.606
## iteration 344. Log-lik: -894675.889
## iteration 345. Log-lik: -894678.318
## iteration 346. Log-lik: -894671.835
## iteration 347. Log-lik: -894670.299
## iteration 348. Log-lik: -894669.749
## iteration 349. Log-lik: -894655.488
## iteration 350. Log-lik: -894649.845
## iteration 351. Log-lik: -894648.092
## iteration 352. Log-lik: -894637.841
## iteration 353. Log-lik: -894631.652
## iteration 354. Log-lik: -894629.877
## iteration 355. Log-lik: -894620.019
## iteration 356. Log-lik: -894618.153
## iteration 357. Log-lik: -894594.595
## iteration 358. Log-lik: -894584.657
## iteration 359. Log-lik: -894582.863
## iteration 360. Log-lik: -894577.217
## iteration 361. Log-lik: -894559.196
## iteration 362. Log-lik: -894553.015
## iteration 363. Log-lik: -894536.359
## iteration 364. Log-lik: -894530.674
## iteration 365. Log-lik: -894532.817
## iteration 366. Log-lik: -894518.295
## iteration 367. Log-lik: -894500.241
## iteration 368. Log-lik: -894489.496
## iteration 369. Log-lik: -894491.545
## iteration 370. Log-lik: -894481.559
## iteration 371. Log-lik: -894483.592
## iteration 372. Log-lik: -894481.423
## iteration 373. Log-lik: -894471.296
## iteration 374. Log-lik: -894472.11
## iteration 375. Log-lik: -894461.787
## iteration 376. Log-lik: -894456.919
## iteration 377. Log-lik: -894454.223
## iteration 378. Log-lik: -894448.237
## iteration 379. Log-lik: -894430.098
## iteration 380. Log-lik: -894415.193
## iteration 381. Log-lik: -894396.904
## iteration 382. Log-lik: -894394.686
## iteration 383. Log-lik: -894380.348
## iteration 384. Log-lik: -894373.204
## iteration 385. Log-lik: -894375.024
## iteration 386. Log-lik: -894372.908
## iteration 387. Log-lik: -894374.717
## iteration 388. Log-lik: -894371.508
## iteration 389. Log-lik: -894365.346
## iteration 390. Log-lik: -894349.599
## iteration 391. Log-lik: -894348.121
## iteration 392. Log-lik: -894349.874
## iteration 393. Log-lik: -894347.431
## iteration 394. Log-lik: -894340.926
## iteration 395. Log-lik: -894338.653
## iteration 396. Log-lik: -894340.316
## iteration 397. Log-lik: -894337.925
## iteration 398. Log-lik: -894330.778
## iteration 399. Log-lik: -894332.455
## iteration 400. Log-lik: -894325.374
## iteration 401. Log-lik: -894323.099
## iteration 402. Log-lik: -894308.135
## iteration 403. Log-lik: -894305.785
## iteration 404. Log-lik: -894291.283
## iteration 405. Log-lik: -894284.784
## iteration 406. Log-lik: -894277.799
## iteration 407. Log-lik: -894263.105
## iteration 408. Log-lik: -894256.019
## iteration 409. Log-lik: -894257.541
## iteration 410. Log-lik: -894251.037
## iteration 411. Log-lik: -894252.549
## iteration 412. Log-lik: -894250.048
## iteration 413. Log-lik: -894251.23
## iteration 414. Log-lik: -894244.021
## iteration 415. Log-lik: -894245.509
## iteration 416. Log-lik: -894241.924
## iteration 417. Log-lik: -894226.747
## iteration 418. Log-lik: -894219.608
## iteration 419. Log-lik: -894217.062
## iteration 420. Log-lik: -894218.52
## iteration 421. Log-lik: -894216.147
## iteration 422. Log-lik: -894217.602
## iteration 423. Log-lik: -894206.456
## iteration 424. Log-lik: -894199.379
## iteration 425. Log-lik: -894188.118
## iteration 426. Log-lik: -894185.504
## iteration 427. Log-lik: -894182.682
## iteration 428. Log-lik: -894175.806
## iteration 429. Log-lik: -894172.491
## iteration 430. Log-lik: -894160.988
## iteration 431. Log-lik: -894151.537
## iteration 432. Log-lik: -894148.785
## iteration 433. Log-lik: -894138.051
## iteration 434. Log-lik: -894131.224
## iteration 435. Log-lik: -894132.535
## iteration 436. Log-lik: -894125.749
## iteration 437. Log-lik: -894109.375
## iteration 438. Log-lik: -894110.655
## iteration 439. Log-lik: -894096.423
## iteration 440. Log-lik: -894093.694
## iteration 441. Log-lik: -894090.742
## iteration 442. Log-lik: -894091.986
## iteration 443. Log-lik: -894085.142
## iteration 444. Log-lik: -894077.575
## iteration 445. Log-lik: -894078.715
## iteration 446. Log-lik: -894071.1
## iteration 447. Log-lik: -894059.02
## iteration 448. Log-lik: -894052.363
## iteration 449. Log-lik: -894040.697
## iteration 450. Log-lik: -894037.901
## iteration 451. Log-lik: -894039.045
## iteration 452. Log-lik: -894040.188
## iteration 453. Log-lik: -894037.234
## iteration 454. Log-lik: -894038.372
## iteration 455. Log-lik: -894031.422
## iteration 456. Log-lik: -894024.341
## iteration 457. Log-lik: -894020.756
## iteration 458. Log-lik: -894017.66
## iteration 459. Log-lik: -894013.895
## iteration 460. Log-lik: -894015.025
## iteration 461. Log-lik: -894008.203
## iteration 462. Log-lik: -894009.323
## iteration 463. Log-lik: -894005.736
## iteration 464. Log-lik: -894006.841
## iteration 465. Log-lik: -894003.984
## iteration 466. Log-lik: -893993.097
## iteration 467. Log-lik: -893990.191
## iteration 468. Log-lik: -893987.331
## iteration 469. Log-lik: -893979.991
## iteration 470. Log-lik: -893981.042
## iteration 471. Log-lik: -893982.091
## iteration 472. Log-lik: -893975.017
## iteration 473. Log-lik: -893976.051
## iteration 474. Log-lik: -893968.461
## iteration 475. Log-lik: -893961.971
## iteration 476. Log-lik: -893963.008
## iteration 477. Log-lik: -893959.986
## iteration 478. Log-lik: -893961.015
## iteration 479. Log-lik: -893962.044
## iteration 480. Log-lik: -893946.49
## iteration 481. Log-lik: -893943.478
## iteration 482. Log-lik: -893936.688
## iteration 483. Log-lik: -893929.74
## iteration 484. Log-lik: -893926.018
## iteration 485. Log-lik: -893918.08
## iteration 486. Log-lik: -893914.979
## iteration 487. Log-lik: -893909.325
## iteration 488. Log-lik: -893910.281
## iteration 489. Log-lik: -893907.181
## iteration 490. Log-lik: -893908.13
## iteration 491. Log-lik: -893909.079
## iteration 492. Log-lik: -893902.139
## iteration 493. Log-lik: -893903.084
## iteration 494. Log-lik: -893904.029
## iteration 495. Log-lik: -893904.972
## iteration 496. Log-lik: -893901.926
## iteration 497. Log-lik: -893890.602
## iteration 498. Log-lik: -893887.323
## iteration 499. Log-lik: -893884.124
## iteration 500. Log-lik: -893882.946
## topTableF is obsolete and will be removed in a future version of limma. Please consider using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 462 links to zero.
## Prior versus data weight is tuned to be 100%.
## iteration 1
## iteration 2. Log-lik: -1081575.363
## iteration 3. Log-lik: -1065927.061
## iteration 4. Log-lik: -1062625.874
## iteration 5. Log-lik: -1061128.066
## iteration 6. Log-lik: -1059561.755
## iteration 7. Log-lik: -1058355.645
## iteration 8. Log-lik: -1057414.033
## iteration 9. Log-lik: -1056642.41
## iteration 10. Log-lik: -1055734.173
## iteration 11. Log-lik: -1055003.922
## iteration 12. Log-lik: -1054316.887
## iteration 13. Log-lik: -1053756.937
## iteration 14. Log-lik: -1053300.05
## iteration 15. Log-lik: -1052887.089
## iteration 16. Log-lik: -1052593.876
## iteration 17. Log-lik: -1052387.473
## iteration 18. Log-lik: -1052202.904
## iteration 19. Log-lik: -1052057.146
## iteration 20. Log-lik: -1051955.175
## iteration 21. Log-lik: -1051886.963
## iteration 22. Log-lik: -1051814.334
## iteration 23. Log-lik: -1051775.898
## iteration 24. Log-lik: -1051724.207
## iteration 25. Log-lik: -1051690.667
## iteration 26. Log-lik: -1051647.772
## iteration 27. Log-lik: -1051610.501
## iteration 28. Log-lik: -1051598.519
## iteration 29. Log-lik: -1051582.489
## iteration 30. Log-lik: -1051570.537
## iteration 31. Log-lik: -1051557.673
## iteration 32. Log-lik: -1051556.705
## iteration 33. Log-lik: -1051549.682
## iteration 34. Log-lik: -1051532.66
## iteration 35. Log-lik: -1051513.722
## iteration 36. Log-lik: -1051510.399
## iteration 37. Log-lik: -1051502.735
## iteration 38. Log-lik: -1051494.563
## iteration 39. Log-lik: -1051493.081
## iteration 40. Log-lik: -1051489.972
## iteration 41. Log-lik: -1051490.731
## iteration 42. Log-lik: -1051491.415
## iteration 43. Log-lik: -1051487.237
## iteration 44. Log-lik: -1051487.772
## iteration 45. Log-lik: -1051488.253
## iteration 46. Log-lik: -1051483.847
## iteration 47. Log-lik: -1051484.217
## iteration 48. Log-lik: -1051479.752
## iteration 49. Log-lik: -1051480.055
## iteration 50. Log-lik: -1051480.332
## iteration 51. Log-lik: -1051475.75
## iteration 52. Log-lik: -1051475.975
## iteration 53. Log-lik: -1051476.18
## iteration 54. Log-lik: -1051476.37
## iteration 55. Log-lik: -1051476.544
## iteration 56. Log-lik: -1051471.717
## iteration 57. Log-lik: -1051471.859
## iteration 58. Log-lik: -1051471.99
## iteration 59. Log-lik: -1051472.112
## iteration 60. Log-lik: -1051472.225
## iteration 61. Log-lik: -1051472.33
## iteration 62. Log-lik: -1051475.914
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please consider using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## iteration 1
## iteration 2. Log-lik: -1093703.996
## iteration 3. Log-lik: -1052599.183
## iteration 4. Log-lik: -1029460.886
## iteration 5. Log-lik: -1019902.835
## iteration 6. Log-lik: -1015119.052
## iteration 7. Log-lik: -1010256.355
## iteration 8. Log-lik: -1005339.363
## iteration 9. Log-lik: -999708.634
## iteration 10. Log-lik: -993640.408
## iteration 11. Log-lik: -987860.125
## iteration 12. Log-lik: -982806.718
## iteration 13. Log-lik: -978087.579
## iteration 14. Log-lik: -973737.08
## iteration 15. Log-lik: -969682.771
## iteration 16. Log-lik: -966432.056
## iteration 17. Log-lik: -963116.948
## iteration 18. Log-lik: -960287.085
## iteration 19. Log-lik: -957596.534
## iteration 20. Log-lik: -955023.592
## iteration 21. Log-lik: -952714.532
## iteration 22. Log-lik: -950375.251
## iteration 23. Log-lik: -948118.689
## iteration 24. Log-lik: -945901.402
## iteration 25. Log-lik: -943774.002
## iteration 26. Log-lik: -941754.027
## iteration 27. Log-lik: -939917.649
## iteration 28. Log-lik: -938127.489
## iteration 29. Log-lik: -936752.056
## iteration 30. Log-lik: -935322.36
## iteration 31. Log-lik: -933819.542
## iteration 32. Log-lik: -932342.048
## iteration 33. Log-lik: -931063.738
## iteration 34. Log-lik: -930012.917
## iteration 35. Log-lik: -929082.111
## iteration 36. Log-lik: -928002.207
## iteration 37. Log-lik: -926985.183
## iteration 38. Log-lik: -925948.501
## iteration 39. Log-lik: -925009.186
## iteration 40. Log-lik: -923980.007
## iteration 41. Log-lik: -923116.441
## iteration 42. Log-lik: -922441.559
## iteration 43. Log-lik: -921682.91
## iteration 44. Log-lik: -921029.963
## iteration 45. Log-lik: -920381.121
## iteration 46. Log-lik: -919579.852
## iteration 47. Log-lik: -918913.788
## iteration 48. Log-lik: -918220.662
## iteration 49. Log-lik: -917699.718
## iteration 50. Log-lik: -917207.552
## iteration 51. Log-lik: -916672.852
## iteration 52. Log-lik: -916269.035
## iteration 53. Log-lik: -915630.041
## iteration 54. Log-lik: -915174.346
## iteration 55. Log-lik: -914695.414
## iteration 56. Log-lik: -914200.354
## iteration 57. Log-lik: -913690.597
## iteration 58. Log-lik: -913239.638
## iteration 59. Log-lik: -912870.137
## iteration 60. Log-lik: -912443.079
## iteration 61. Log-lik: -911987.71
## iteration 62. Log-lik: -911678.406
## iteration 63. Log-lik: -911354.829
## iteration 64. Log-lik: -911039.478
## iteration 65. Log-lik: -910737.925
## iteration 66. Log-lik: -910441.054
## iteration 67. Log-lik: -910210.284
## iteration 68. Log-lik: -909891.221
## iteration 69. Log-lik: -909702.168
## iteration 70. Log-lik: -909501.269
## iteration 71. Log-lik: -909293.118
## iteration 72. Log-lik: -908978.312
## iteration 73. Log-lik: -908691.478
## iteration 74. Log-lik: -908497.114
## iteration 75. Log-lik: -908270.949
## iteration 76. Log-lik: -908098.47
## iteration 77. Log-lik: -907877.548
## iteration 78. Log-lik: -907690.292
## iteration 79. Log-lik: -907418.313
## iteration 80. Log-lik: -907187.773
## iteration 81. Log-lik: -907035.518
## iteration 82. Log-lik: -906884.787
## iteration 83. Log-lik: -906765.25
## iteration 84. Log-lik: -906643.573
## iteration 85. Log-lik: -906505.125
## iteration 86. Log-lik: -906383.328
## iteration 87. Log-lik: -906188.644
## iteration 88. Log-lik: -905995.59
## iteration 89. Log-lik: -905812.13
## iteration 90. Log-lik: -905656.115
## iteration 91. Log-lik: -905507.174
## iteration 92. Log-lik: -905317.304
## iteration 93. Log-lik: -905192.375
## iteration 94. Log-lik: -905098.814
## iteration 95. Log-lik: -904899.045
## iteration 96. Log-lik: -904690.204
## iteration 97. Log-lik: -904571.192
## iteration 98. Log-lik: -904525.802
## iteration 99. Log-lik: -904452.815
## iteration 100. Log-lik: -904341.25
## iteration 101. Log-lik: -904263.699
## iteration 102. Log-lik: -904118.422
## iteration 103. Log-lik: -903988.145
## iteration 104. Log-lik: -903880.111
## iteration 105. Log-lik: -903757.031
## iteration 106. Log-lik: -903638.837
## iteration 107. Log-lik: -903486.53
## iteration 108. Log-lik: -903454.387
## iteration 109. Log-lik: -903450.984
## iteration 110. Log-lik: -903285.443
## iteration 111. Log-lik: -903165.37
## iteration 112. Log-lik: -903081.337
## iteration 113. Log-lik: -903013.573
## iteration 114. Log-lik: -902953.254
## iteration 115. Log-lik: -902869.241
## iteration 116. Log-lik: -902822.366
## iteration 117. Log-lik: -902767.244
## iteration 118. Log-lik: -902689.348
## iteration 119. Log-lik: -902586.137
## iteration 120. Log-lik: -902519.132
## iteration 121. Log-lik: -902464.475
## iteration 122. Log-lik: -902423.542
## iteration 123. Log-lik: -902357.914
## iteration 124. Log-lik: -902301.827
## iteration 125. Log-lik: -902191.79
## iteration 126. Log-lik: -902116.895
## iteration 127. Log-lik: -902027.952
## iteration 128. Log-lik: -901966.066
## iteration 129. Log-lik: -901967.568
## iteration 130. Log-lik: -901936.149
## iteration 131. Log-lik: -901872.837
## iteration 132. Log-lik: -901887.563
## iteration 133. Log-lik: -901831.914
## iteration 134. Log-lik: -901687.068
## iteration 135. Log-lik: -901586.041
## iteration 136. Log-lik: -901535.926
## iteration 137. Log-lik: -901450.908
## iteration 138. Log-lik: -901422.826
## iteration 139. Log-lik: -901413.703
## iteration 140. Log-lik: -901405.517
## iteration 141. Log-lik: -901350.42
## iteration 142. Log-lik: -901263.475
## iteration 143. Log-lik: -901206.81
## iteration 144. Log-lik: -901176.329
## iteration 145. Log-lik: -901172.302
## iteration 146. Log-lik: -901192.714
## iteration 147. Log-lik: -901183.593
## iteration 148. Log-lik: -901122.045
## iteration 149. Log-lik: -900982.527
## iteration 150. Log-lik: -900854.312
## iteration 151. Log-lik: -900671.353
## iteration 152. Log-lik: -900660.488
## iteration 153. Log-lik: -900672.41
## iteration 154. Log-lik: -900687.027
## iteration 155. Log-lik: -900679.969
## iteration 156. Log-lik: -900674.758
## iteration 157. Log-lik: -900618.131
## iteration 158. Log-lik: -900586.078
## iteration 159. Log-lik: -900547.576
## iteration 160. Log-lik: -900561.437
## iteration 161. Log-lik: -900511.519
## iteration 162. Log-lik: -900487.614
## iteration 163. Log-lik: -900375.379
## iteration 164. Log-lik: -900272.816
## iteration 165. Log-lik: -900253.754
## iteration 166. Log-lik: -900197.487
## iteration 167. Log-lik: -900142.667
## iteration 168. Log-lik: -900139.196
## iteration 169. Log-lik: -900128.135
## iteration 170. Log-lik: -900143.743
## iteration 171. Log-lik: -900131.744
## iteration 172. Log-lik: -900156.759
## iteration 173. Log-lik: -900127.932
## iteration 174. Log-lik: -900097.828
## iteration 175. Log-lik: -899995.242
## iteration 176. Log-lik: -899975.393
## iteration 177. Log-lik: -899869.683
## iteration 178. Log-lik: -899872.903
## iteration 179. Log-lik: -899869.292
## iteration 180. Log-lik: -899872.414
## iteration 181. Log-lik: -899769.261
## iteration 182. Log-lik: -899695.209
## iteration 183. Log-lik: -899656.578
## iteration 184. Log-lik: -899683.569
## iteration 185. Log-lik: -899727.414
## iteration 186. Log-lik: -899750.522
## iteration 187. Log-lik: -899798.954
## iteration 188. Log-lik: -899740.408
## iteration 189. Log-lik: -899678.612
## iteration 190. Log-lik: -899610.678
## iteration 191. Log-lik: -899508.838
## iteration 192. Log-lik: -899487.237
## iteration 193. Log-lik: -899507.193
## iteration 194. Log-lik: -899495.043
## iteration 195. Log-lik: -899435.864
## iteration 196. Log-lik: -899374.107
## iteration 197. Log-lik: -899348.626
## iteration 198. Log-lik: -899365.637
## iteration 199. Log-lik: -899373.7
## iteration 200. Log-lik: -899436.706
## iteration 201. Log-lik: -899497.231
## iteration 202. Log-lik: -899469.342
## iteration 203. Log-lik: -899435.006
## iteration 204. Log-lik: -899379.385
## iteration 205. Log-lik: -899352.255
## iteration 206. Log-lik: -899371.282
## iteration 207. Log-lik: -899317.196
## iteration 208. Log-lik: -899312.704
## iteration 209. Log-lik: -899163.318
## iteration 210. Log-lik: -899147.56
## iteration 211. Log-lik: -899110.162
## iteration 212. Log-lik: -899035.935
## iteration 213. Log-lik: -899044.235
## iteration 214. Log-lik: -899053.074
## iteration 215. Log-lik: -899129.04
## iteration 216. Log-lik: -899131.439
## iteration 217. Log-lik: -899152.114
## iteration 218. Log-lik: -899103.91
## iteration 219. Log-lik: -899122.655
## iteration 220. Log-lik: -899081.21
## iteration 221. Log-lik: -899021.95
## iteration 222. Log-lik: -898990.019
## iteration 223. Log-lik: -898915.505
## iteration 224. Log-lik: -898911.184
## iteration 225. Log-lik: -898886.33
## iteration 226. Log-lik: -898886.542
## iteration 227. Log-lik: -898894.537
## iteration 228. Log-lik: -898870.22
## iteration 229. Log-lik: -898900.682
## iteration 230. Log-lik: -898862.142
## iteration 231. Log-lik: -898874.232
## iteration 232. Log-lik: -898906.734
## iteration 233. Log-lik: -898842.559
## iteration 234. Log-lik: -898834.656
## iteration 235. Log-lik: -898832.66
## iteration 236. Log-lik: -898778.965
## iteration 237. Log-lik: -898687.019
## iteration 238. Log-lik: -898682.796
## iteration 239. Log-lik: -898667.312
## iteration 240. Log-lik: -898714.81
## iteration 241. Log-lik: -898752.897
## iteration 242. Log-lik: -898720.327
## iteration 243. Log-lik: -898670.286
## iteration 244. Log-lik: -898643.8
## iteration 245. Log-lik: -898707.908
## iteration 246. Log-lik: -898676.485
## iteration 247. Log-lik: -898670.45
## iteration 248. Log-lik: -898685.663
## iteration 249. Log-lik: -898657.697
## iteration 250. Log-lik: -898574.189
## iteration 251. Log-lik: -898486.143
## iteration 252. Log-lik: -898498.809
## iteration 253. Log-lik: -898484.558
## iteration 254. Log-lik: -898459.911
## iteration 255. Log-lik: -898457.583
## iteration 256. Log-lik: -898435.222
## iteration 257. Log-lik: -898445.877
## iteration 258. Log-lik: -898435.786
## iteration 259. Log-lik: -898443.704
## iteration 260. Log-lik: -898426.72
## iteration 261. Log-lik: -898398.684
## iteration 262. Log-lik: -898419.528
## iteration 263. Log-lik: -898416.672
## iteration 264. Log-lik: -898424.81
## iteration 265. Log-lik: -898342.397
## iteration 266. Log-lik: -898305.094
## iteration 267. Log-lik: -898297.658
## iteration 268. Log-lik: -898288.16
## iteration 269. Log-lik: -898307.038
## iteration 270. Log-lik: -898302.9
## iteration 271. Log-lik: -898204.45
## iteration 272. Log-lik: -898181.1
## iteration 273. Log-lik: -898148.2
## iteration 274. Log-lik: -898174.383
## iteration 275. Log-lik: -898178.936
## iteration 276. Log-lik: -898191.896
## iteration 277. Log-lik: -898254.074
## iteration 278. Log-lik: -898237.642
## iteration 279. Log-lik: -898219.979
## iteration 280. Log-lik: -898196.163
## iteration 281. Log-lik: -898169.954
## iteration 282. Log-lik: -898119.291
## iteration 283. Log-lik: -898049.003
## iteration 284. Log-lik: -898037.841
## iteration 285. Log-lik: -898046.968
## iteration 286. Log-lik: -898108.566
## iteration 287. Log-lik: -898080.512
## iteration 288. Log-lik: -898082.956
## iteration 289. Log-lik: -898022.559
## iteration 290. Log-lik: -898014.754
## iteration 291. Log-lik: -898000.954
## iteration 292. Log-lik: -898028.281
## iteration 293. Log-lik: -898071.27
## iteration 294. Log-lik: -898074.318
## iteration 295. Log-lik: -898075.154
## iteration 296. Log-lik: -898067.028
## iteration 297. Log-lik: -897959.268
## iteration 298. Log-lik: -897906.351
## iteration 299. Log-lik: -897908.744
## iteration 300. Log-lik: -897955.529
## iteration 301. Log-lik: -897938.846
## iteration 302. Log-lik: -897961.08
## iteration 303. Log-lik: -897903.968
## iteration 304. Log-lik: -897897.365
## iteration 305. Log-lik: -897805.231
## iteration 306. Log-lik: -897794.313
## iteration 307. Log-lik: -897862.401
## iteration 308. Log-lik: -897879.396
## iteration 309. Log-lik: -897868.525
## iteration 310. Log-lik: -897861.852
## iteration 311. Log-lik: -897831.754
## iteration 312. Log-lik: -897781.496
## iteration 313. Log-lik: -897772.291
## iteration 314. Log-lik: -897769.383
## iteration 315. Log-lik: -897759.342
## iteration 316. Log-lik: -897786.019
## iteration 317. Log-lik: -897817.233
## iteration 318. Log-lik: -897840.117
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please consider using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |== | 2% | |== | 3% | |=== | 4% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | 
|======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | 
|=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
# Inspect the evaluation results produced in the chunk above:
# res$pAll — per-method result plots/values; res$dfAUC — AUC per method (printed below).
res$pAll
res$dfAUC
## auc method
## 1 0.6495911 poisson
## 2 0.7539683 dirMult
## 3 0.6563252 dirMult_alphaEst
## 4 0.6503127 poisson_lasso
## 5 0.7539683 dirMult_lasso
## 6 0.6565657 dirMult_lasso_alphaEst
## 7 0.6616162 poisson_lasso_repr
## 8 0.7484367 dirMult_lasso_repr
## 9 0.6563252 dirMult_lasso_alphaEst_repr
## 10 0.6772487 viper
## 11 0.6955267 AUCell
# Noise grids, scaled to the true GRN `XAll` carried over from the previous chunk.
# Number of spurious TFs to add: 0x, 1x, 2x and 4x the number of true TFs,
# i.e. the fraction of data-generating TFs ranges from 100% down to 33.3%... wait,
# expressed as multiples of ncol(XAll). # 100% to 33.3% data-generating TFs
nNoiseTFGrid <- ncol(XAll) * c(0, 1, 2, 4)
# Number of edges to remove (negative) or add (positive), as fractions/multiples
# of the total number of true links. # -50% to +200% true links
nNoiseEdgeGrid <- sum(abs(XAll)) * c(-1/2, -1/4, 0, 2, 4)
# Number of simulated datasets to evaluate (must match the generation loop).
nIter <- 6
# Container for legend-free plots (populated elsewhere).
pNoLeg <- list()
# For each simulated dataset, sweep the grid of (noisy TFs x noisy edges),
# perturb the true GRN accordingly, and evaluate all methods with a more
# discriminative prior (true-edge alpha inflated 100x). Results accumulate
# in dfAUCAll and are saved per dataset.
# NOTE(review): relies on objects/functions from earlier chunks:
# `pt`, `addNoiseToGRN`, `constructViperRegulon`, `constructGenesets`,
# `evaluateSimulation_repressions` — none are defined in this chunk.
for(nn in 1:nIter){
# load a dataset
data <- readRDS(paste0("datasets/simIters/dataset",nn,".rds"))
model <- data[[1]]
dataset <- data[[2]]
for(nnTF in 1:length(nNoiseTFGrid)){
nNoiseTFs <- nNoiseTFGrid[nnTF]
for(nnEdge in 1:length(nNoiseEdgeGrid)){
# seed depends on dataset and grid position so each condition is reproducible
set.seed(nn*10*nnTF*nnEdge)
nNoiseEdges <- nNoiseEdgeGrid[nnEdge]
# total expression = pre-mRNA + mRNA counts; transpose to genes x cells
countsAll <- t(as.matrix(dataset$experiment$counts_premrna + dataset$experiment$counts_mrna))
# only look at TFs
feature_info <- dataset$feature_info
tf_info <- feature_info %>% filter(is_tf)
feature_network <- dataset$feature_network %>% mutate(name = paste0(from, "->", to))
### get true GRN
tf <- as.character(unique(feature_network$from))
targets <- as.character(unique(feature_network$to))
## full GRN including repressions
# targets x TFs adjacency; entries are signed effects (+1 activation, -1 repression)
XAll <- matrix(0, nrow=length(targets), ncol=length(tf),
dimnames=list(targets, tf))
for(tt in 1:ncol(XAll)){
curTF <- tf[tt]
curTFTargets <- as.character(feature_network[feature_network$from == curTF, "to"][[1]])
curTFEffects <- feature_network[feature_network$from == curTF, "effect"][[1]]
# matrix indexing by (rowname, colname) pairs fills the TF's column
XAll[cbind(curTFTargets,curTF)] <- curTFEffects
}
# drop targets with no regulators and TFs with no targets
XAll <- XAll[rowSums(abs(XAll))>0,]
XAll <- XAll[,colSums(abs(XAll))>0]
# remove TFs with only repressions
tfsWithOnlyRepression <- apply(XAll, 2, function(x){
all(x == 0 | x == -1)
})
XAll <- XAll[,!tfsWithOnlyRepression]
#barplot(table(rowSums(abs(XAll))))
#barplot(table(colSums(abs(XAll))))
## alpha
# prior edge weights: start from unweighted |XAll|, then upweight true
# activating edges by the dyngen interaction strength
alpha <- abs(XAll)
# (target, TF) index pairs from the feature network (columns 2 = to, 1 = from)
id <- as.matrix(cbind(dataset$feature_network[,2], dataset$feature_network[,1]))
# keep only pairs present in the pruned XAll and with activating effect (+1)
keepId <- id[,1] %in% rownames(XAll) & id[,2] %in% colnames(XAll) & dataset$feature_network$effect == 1
id <- id[keepId,]
# alpha[id] <- dataset$feature_network$strength[keepId]
### more discriminative prior by making this much larger
alpha[id] <- dataset$feature_network$strength[keepId] * 100
# truth: only TFs from burn-in or modules.
# de=1 marks true (data-generating) TFs; Target/HK-named columns are not TFs of interest
truth <- data.frame(de=rep(1, ncol(XAll)),
row.names=colnames(XAll))
truth[grep(x=rownames(truth), pattern="Target|HK"),] <- 0
# restrict the count matrix to genes that appear as targets in the GRN
countsTfTargets <- countsAll[rownames(XAll),]
# perturb the GRN: add nNoiseTFs spurious TF columns and add/remove
# nNoiseEdges edges (negative value presumably removes true edges — behavior
# of addNoiseToGRN defined in an earlier chunk)
XNoisy <- addNoiseToGRN(X = XAll,
nNoiseTFs = nNoiseTFs,
nNoiseEdges = nNoiseEdges,
noiseProb = 3/nrow(XAll))
# truth
# noise TFs appended at the end of XNoisy are labeled de=0 (not truly active)
truthNoisy <- rbind(truth, data.frame(de=rep(0, nNoiseTFs)))
## filter if necessary
# drop genes that lost all edges after noising, and TFs with no remaining targets
keepGene <- rowSums(abs(XNoisy)) > 0
counts <- countsTfTargets[keepGene,]
curAlpha <- alpha[keepGene,]
XNoisy <- XNoisy[keepGene,]
keepTF <- colSums(abs(XNoisy)) > 0
truthNoisy <- truthNoisy[keepTF,,drop=FALSE]
# get true alpha for noisy GRN
if(nNoiseTFs > 0){
# noise-TF columns get their (noisy) adjacency entries as prior weights
alphaTrueNoisy <- cbind(curAlpha, XNoisy[,(ncol(XAll)+1):(ncol(XAll)+nNoiseTFs)])
} else {
alphaTrueNoisy <- curAlpha
}
XNoisy <- XNoisy[,keepTF]
alphaTrueNoisy <- alphaTrueNoisy[,keepTF]
# no prior weight where no edge exists in the noisy GRN
alphaTrueNoisy[XNoisy == 0] <- 0
rownames(truthNoisy) <- colnames(XNoisy)
# viper regulon
regulonNoisy <- constructViperRegulon(XNoisy, alphaTrueNoisy)
# AUCell genesets
genesetsNoisy <- constructGenesets(XNoisy, alphaTrueNoisy)
# design: cells binned into 5%-quantile groups of pseudotime (`pt` comes
# from an earlier chunk — presumably dataset$experiment$cell_info$sim_time;
# TODO confirm it matches the current dataset inside this loop)
qSteps <- 0.05
ptGroups <- Hmisc::cut2(pt, cuts = quantile(dataset$experiment$cell_info$sim_time, prob=seq(0,1,by=qSteps)))
Xpt <- model.matrix(~0+ptGroups)
design <- Xpt
# NOTE(review): the gene-filtered `counts` computed above is never used;
# the full `countsAll` is passed here instead — confirm which is intended.
res <- evaluateSimulation_repressions(counts = countsAll,
design = design,
X = XNoisy,
alpha = alphaTrueNoisy,
regulon = regulonNoisy,
genesets = genesetsNoisy,
truth = truthNoisy,
verbose = FALSE,
alphaScale = 1,
iterMax = 1000)
# annotate this condition's AUC table with the grid coordinates and dataset id
curDfAUC <- res$dfAUC
curDfAUC$noiseEdges <- nNoiseEdges
curDfAUC$noiseTFs <- nNoiseTFs
curDfAUC$nn <- nn
# accumulate across all conditions and datasets (dfAUCAll persists in the
# global environment across nn iterations; each per-nn save below is cumulative)
if(!exists("dfAUCAll")){
dfAUCAll <- curDfAUC
} else {
dfAUCAll <- rbind(dfAUCAll, curDfAUC)
}
}
}
saveRDS(dfAUCAll, file=paste0("../objects/dfAUCAll_dyngen",nn,"_discriminativePrior.rds"))
}
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 156 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 156 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 192 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 156 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 3% | |== | 4% | |=== | 4% | |==== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | 
|========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | 
|============================================================== | 88% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 83 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 83 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 323 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 83 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | 
|========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | |============================================================= | 87% | 
|============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 37 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 37 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 393 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 37 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | 
|========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | |=========================================================== | 84% | |============================================================ | 85% | |============================================================ | 86% | 
|============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 507 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | 
|========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | 
|============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 507 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | 
|========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | 
|============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 88 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 88 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 318 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 88 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | 
|===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | 
|========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 41 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 41 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 411 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 41 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== 
| 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | 
|==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 22 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 22 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 437 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 22 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | 
|==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | 
|====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | 
|==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 507 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | 
|=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | 
|===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | 
|==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 507 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | 
|==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | 
|====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | 
|==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 35 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 35 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 424 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 35 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 23 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 23 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 442 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 23 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | 
|=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 9 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 9 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 466 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 9 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | 
|==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | 
|====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | 
|==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 507 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 507 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | 
|=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 454 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 7 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 7 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 488 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 7 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 496 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 506 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 
45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | 
|================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | 
|============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 507 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 
45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | 
|================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | 
|============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 110 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 110 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 280 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 110 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | 
|====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | 
|========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 48 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 48 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 399 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 48 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | 
|====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | 
|=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 458 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | 
|====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | 
|======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | 
|===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | 
|===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | 
|===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | 
|===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 50 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 50 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 369 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 50 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== 
| 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 27 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 27 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 422 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 27 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | 
|=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | 
|================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 11 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 11 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 11 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | 
|=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | 
|================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | 
|==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== 
| 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | 
|==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | 
|====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | 
|================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 27 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 27 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 410 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 27 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | 
|================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 449 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% 
| |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | 
|=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | 
|================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 482 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== 
| 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | 
|==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | 
|================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | 
|================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 437 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 477 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= 
| 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 
71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 491 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | 
|================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 493 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% 
| |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 
71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 188 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 188 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 177 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 188 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 107 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 107 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 302 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 107 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 57 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 57 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 383 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 57 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 481 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 481 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 102 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 102 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 310 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 102 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 60 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 60 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 383 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 60 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 26 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 26 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 436 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 26 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 480 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## Warning in variance_prior(ql_disp, df, covariate = gene_means, abundance_trend = ql_disp_trend): Variance prior estimate did not properly converge
## Warning in variance_prior(ql_disp, df, covariate = gene_means, abundance_trend = ql_disp_trend): Variance prior estimate did not properly converge
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 481 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 59 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 59 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 364 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 59 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 21 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 21 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 444 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 21 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 455 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 17 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 481 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 481 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 427 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 469 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 7 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 7 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 465 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 7 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 481 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 481 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 144 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 144 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 245 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 144 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | 
|========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | |=========================================================== | 84% | |============================================================ | 85% | |============================================================ | 86% | 
|============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 66 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 66 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 367 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 66 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |== | 2% | |== | 3% | |=== | 4% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | 
|======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | 
|=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 38 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 38 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 411 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 38 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 
55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | 
|========================================================== | 83% | |=========================================================== | 84% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |======================================= 
| 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | 
|========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |======================================= 
| 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | 
|========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 53 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 53 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 388 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 53 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | 
|=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | 
|===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | 
|==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 43 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 43 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 403 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 43 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | 
|=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | 
|===================================================== | 76% | |====================================================== | 77% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | 
|=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 18 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 18 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 439 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 18 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | 
|================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | 
|==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | 
|====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | 
|==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 34 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 34 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 400 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 34 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 440 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 458 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | 
|=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | 
|================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 439 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 14 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 454 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 6 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 
48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | 
|=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 459 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 5 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== 
| 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 468 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | 
|================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | 
|=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | 
|================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 122 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 122 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 298 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 122 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | 
|======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | 
|=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 53 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 53 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 455 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 53 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | 
|====================================== | 54% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | 
|========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 23 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 23 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 514 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 23 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | |======================================= | 55% | 
|======================================= | 56% | |======================================== | 57% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | 
|========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | |=========================================================== | 84% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== 
| 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | 
|======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== 
| 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | 
|======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 56 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 56 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 442 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 56 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 
50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | 
|===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | 
|=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 33 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 33 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 497 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 33 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% 
| |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% 
| |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 10 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 10 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 552 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## Warning in variance_prior(ql_disp, df, covariate = gene_means, abundance_trend = ql_disp_trend): Variance prior estimate did not properly converge
## Warning in variance_prior(ql_disp, df, covariate = gene_means, abundance_trend = ql_disp_trend): Variance prior estimate did not properly converge
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 10 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% 
| |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% 
| |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## Warning: Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | 
|================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 26 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 26 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 503 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 26 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | 
|==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | 
|================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 555 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | 
|================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 11 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 11 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 561 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 11 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | 
|================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 523 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 8 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 3 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 3 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 565 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 3 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== 
| 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 4 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 4 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 574 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 4 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | 
|================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | 
|================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | 
|================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | 
|=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 584 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 112 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 112 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 253 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 112 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |== | 2% | |== | 3% | |=== | 4% | |==== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |========================================= | 58% | 
|========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | 
|============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 61 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 61 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 340 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 61 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | 
|====================================== | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | 
|========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 32 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 32 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 390 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 32 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 53% | |====================================== | 54% | 
|====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | 
|======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 463 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 1 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== 
| 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | 
|======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 464 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== 
| 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | 
|======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |===================================================================== | 98% | |===================================================================== | 99% | 
|======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 56 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 56 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 359 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 56 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 33 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 33 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 396 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 33 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | 
|==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | 
|====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | 
|==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 428 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% 
| |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% 
| |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 462 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | 
|=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | 
|================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 464 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% 
| |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 28 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 28 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 391 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 28 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | 
|=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | 
|===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | 
|================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 421 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 16 genes with only between-gene repressions.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |== | 4% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | 
|================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== 
| 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 9 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 9 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 437 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 9 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | 
|=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | 
|==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | 
|================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 464 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 10% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 30% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 50% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | 
|====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 70% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | 
|========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 90% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 464 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 20% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 40% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | 
|==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 60% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | 
|====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 80% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | 
|==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 10 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 10 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 423 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 10 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | 
|================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 4 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 4 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 452 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 4 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========= | 14% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | 
|================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 2 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 2 genes with only between-gene repressions.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 458 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Pruning 2 genes with only between-gene repressions.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================ | 24% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | |================================================== | 71% | 
|================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : Zero sample variances detected, have been offset away from zero
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 464 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 46% | |================================= | 
47% | |================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |===================================== | 54% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================ | 64% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |=================================================== | 74% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | 
|=============================================================== | 90% | |=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## alpha_gt cannot be positive if there is no edge in the GRN, setting alpha for 464 links to zero.
## Prior versus data weight is tuned to be 100%.
## Converged.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
##
## Computing the association scores
## Computing regulons enrichment with aREA
## | | | 0% | | | 1% | |= | 1% | |= | 2% | |== | 2% | |== | 3% | |=== | 4% | |=== | 5% | |==== | 5% | |==== | 6% | |===== | 6% | |===== | 7% | |===== | 8% | |====== | 8% | |====== | 9% | |======= | 9% | |======= | 10% | |======= | 11% | |======== | 11% | |======== | 12% | |========= | 12% | |========= | 13% | |========== | 14% | |========== | 15% | |=========== | 15% | |=========== | 16% | |============ | 16% | |============ | 17% | |============ | 18% | |============= | 18% | |============= | 19% | |============== | 19% | |============== | 20% | |============== | 21% | |=============== | 21% | |=============== | 22% | |================ | 22% | |================ | 23% | |================= | 24% | |================= | 25% | |================== | 25% | |================== | 26% | |=================== | 27% | |=================== | 28% | |==================== | 28% | |==================== | 29% | |===================== | 29% | |===================== | 30% | |===================== | 31% | |====================== | 31% | |====================== | 32% | |======================= | 32% | |======================= | 33% | |======================= | 34% | |======================== | 34% | |======================== | 35% | |========================= | 35% | |========================= | 36% | |========================== | 37% | |========================== | 38% | |=========================== | 38% | |=========================== | 39% | |============================ | 39% | |============================ | 40% | |============================ | 41% | |============================= | 41% | |============================= | 42% | |============================== | 42% | |============================== | 43% | |============================== | 44% | |=============================== | 44% | |=============================== | 45% | |================================ | 45% | |================================ | 46% | |================================= | 47% | 
|================================= | 48% | |================================== | 48% | |================================== | 49% | |=================================== | 49% | |=================================== | 50% | |=================================== | 51% | |==================================== | 51% | |==================================== | 52% | |===================================== | 52% | |===================================== | 53% | |====================================== | 54% | |====================================== | 55% | |======================================= | 55% | |======================================= | 56% | |======================================== | 56% | |======================================== | 57% | |======================================== | 58% | |========================================= | 58% | |========================================= | 59% | |========================================== | 59% | |========================================== | 60% | |========================================== | 61% | |=========================================== | 61% | |=========================================== | 62% | |============================================ | 62% | |============================================ | 63% | |============================================= | 64% | |============================================= | 65% | |============================================== | 65% | |============================================== | 66% | |=============================================== | 66% | |=============================================== | 67% | |=============================================== | 68% | |================================================ | 68% | |================================================ | 69% | |================================================= | 69% | |================================================= | 70% | |================================================= | 71% | 
|================================================== | 71% | |================================================== | 72% | |=================================================== | 72% | |=================================================== | 73% | |==================================================== | 74% | |==================================================== | 75% | |===================================================== | 75% | |===================================================== | 76% | |====================================================== | 77% | |====================================================== | 78% | |======================================================= | 78% | |======================================================= | 79% | |======================================================== | 79% | |======================================================== | 80% | |======================================================== | 81% | |========================================================= | 81% | |========================================================= | 82% | |========================================================== | 82% | |========================================================== | 83% | |========================================================== | 84% | |=========================================================== | 84% | |=========================================================== | 85% | |============================================================ | 85% | |============================================================ | 86% | |============================================================= | 87% | |============================================================= | 88% | |============================================================== | 88% | |============================================================== | 89% | |=============================================================== | 89% | |=============================================================== | 90% | 
|=============================================================== | 91% | |================================================================ | 91% | |================================================================ | 92% | |================================================================= | 92% | |================================================================= | 93% | |================================================================= | 94% | |================================================================== | 94% | |================================================================== | 95% | |=================================================================== | 95% | |=================================================================== | 96% | |==================================================================== | 97% | |==================================================================== | 98% | |===================================================================== | 98% | |===================================================================== | 99% | |======================================================================| 99% | |======================================================================| 100%
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
## Warning in .AUCell_buildRankings(exprMat = exprMat, featureType = featureType,
## : nCores is no longer used. It will be deprecated in the next AUCell version.
## topTableF is obsolete and will be removed in a future version of limma. Please considering using topTable instead.
# Build a YYYYMMDD stamp from today's date to version the saved object.
# (format() with "%Y%m%d" yields the same string as stripping the dashes
# from the default "%Y-%m-%d" representation of Sys.Date().)
date <- format(Sys.Date(), "%Y%m%d")
# Persist the combined AUC results for the discriminative-prior setting.
saveRDS(dfAUCAll,
        file = paste0("../objects/dfAUCAll_dyngen", date,
                      "_discriminativePrior.rds"))
# Combine the per-simulation AUC result objects (one RDS file per dyngen
# iteration, written earlier) into a single data frame.
# Using lapply + one do.call(rbind, ...) avoids the original anti-pattern of
# growing dfAUCAll with rbind() inside the loop (quadratic copying) and the
# special-cased first iteration.
dfAUCAll <- do.call(rbind, lapply(seq_len(6), function(nn) {
  # Each file holds the AUC table for simulation `nn`.
  curdfAUC <- readRDS(paste0("../objects/dfAUCAll_dyngen", nn, ".rds"))
  curdfAUC$nn <- nn  # tag rows with their simulation index
  curdfAUC
}))
# dfAUCAll <- readRDS("../objects/dfAUCAll_dyngen20210502.rds")
# Fix the display order of the methods for downstream plotting: Poisson
# variants first, then Dirichlet-multinomial variants, then the two
# competitor methods (viper, AUCell).
methodOrder <- c(
  "poisson", "poisson_lasso", "poisson_lasso_repr",
  "dirMult", "dirMult_lasso", "dirMult_lasso_repr",
  "dirMult_alphaEst", "dirMult_lasso_alphaEst", "dirMult_lasso_alphaEst_repr",
  "viper", "AUCell"
)
dfAUCAll$method <- factor(dfAUCAll$method, levels = methodOrder)
# Boxplots of AUC per method, faceted over the noise-TF x noise-edge grid.
# Facet grid dimensions come from nNoiseTFGrid / nNoiseEdgeGrid, which are
# assumed to be defined earlier in the document -- TODO confirm in scope here.
# Note: theme() must come after theme_bw(), since theme_bw() would otherwise
# reset the axis-text/tick suppression.
ggplot(dfAUCAll, aes(x=method, y=auc)) +
geom_boxplot(aes(col=method)) +
facet_wrap(.~noiseTFs * noiseEdges,
ncol = length(nNoiseEdgeGrid),
nrow = length(nNoiseTFGrid)) +
theme_bw() +
theme(axis.text.x = element_blank(),
axis.ticks.x = element_blank()) +
scale_y_continuous(n.breaks=4) +
xlab("") + ylab("AUC")
# ggsave() with no plot argument saves the last plot displayed (the one above).
ggsave("../../plots/domino/aucAllSettings_dyngen_discriminativePrior.pdf", width=12, height=9)